import os
import asyncio
import logging
import json
import time
import inspect
import typing
import base58
import random
from urllib.parse import urlencode, quote
from typing import Callable, Optional, List
from binascii import hexlify, unhexlify
from traceback import format_exc
from aiohttp import web
from functools import wraps
from torba.client.wallet import Wallet
from torba.client.baseaccount import SingleKey, HierarchicalDeterministic

from lbrynet import utils
from lbrynet.conf import Config, Setting
from lbrynet.blob.blob_file import is_valid_blobhash
from lbrynet.blob_exchange.downloader import download_blob
from lbrynet.error import DownloadSDTimeout, ComponentsNotStarted
from lbrynet.error import NullFundsError, NegativeFundsError, ComponentStartConditionNotMet
from lbrynet.extras import system_info
from lbrynet.extras.daemon import analytics
from lbrynet.extras.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT
from lbrynet.extras.daemon.Components import STREAM_MANAGER_COMPONENT
from lbrynet.extras.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT, UPNP_COMPONENT
from lbrynet.extras.daemon.ComponentManager import RequiredCondition
from lbrynet.extras.daemon.ComponentManager import ComponentManager
from lbrynet.extras.daemon.json_response_encoder import JSONResponseEncoder
from lbrynet.extras.daemon.undecorated import undecorated
from lbrynet.wallet.transaction import Transaction, Output, Input
from lbrynet.wallet.account import Account as LBCAccount
from lbrynet.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet.schema.claim import Claim
from lbrynet.schema.uri import parse_lbry_uri, URIParseError
from lbrynet.extras.daemon.comment_client import jsonrpc_batch, jsonrpc_post, rpc_body

if typing.TYPE_CHECKING:
    from lbrynet.blob.blob_manager import BlobManager
    from lbrynet.dht.node import Node
    from lbrynet.extras.daemon.Components import UPnPComponent
    from lbrynet.extras.daemon.exchange_rate_manager import ExchangeRateManager
    from lbrynet.extras.daemon.storage import SQLiteStorage
    from lbrynet.wallet.manager import LbryWalletManager
    from lbrynet.wallet.ledger import MainNetLedger
    from lbrynet.stream.stream_manager import StreamManager

log = logging.getLogger(__name__)


def requires(*components, **conditions):
    if conditions and list(conditions.keys()) != ["conditions"]:
        raise SyntaxError("invalid conditions argument")
    condition_names = conditions.get("conditions", [])

    def _wrap(fn):
        @wraps(fn)
        def _inner(*args, **kwargs):
            component_manager = args[0].component_manager
            for condition_name in condition_names:
                condition_result, err_msg = component_manager.evaluate_condition(condition_name)
                if not condition_result:
                    raise ComponentStartConditionNotMet(err_msg)
            if not component_manager.all_components_running(*components):
                raise ComponentsNotStarted("the following required components have not yet started: "
                                           "%s" % json.dumps(components))
            return fn(*args, **kwargs)
        return _inner
    return _wrap
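
# Illustrative usage (a sketch, not part of this module's API): `requires` is
# meant for methods on a class exposing a `component_manager`, e.g.:
#
#     class ExampleAPI(metaclass=JSONRPCServerType):
#         @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
#         def jsonrpc_example(self):
#             return "runs only once the wallet component is started and unlocked"
#
# Calling the wrapped method before the component is running raises
# ComponentsNotStarted; a failing condition raises ComponentStartConditionNotMet.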


def deprecated(new_command=None):
    def _deprecated_wrapper(f):
        f.new_command = new_command
        f._deprecated = True
        return f
    return _deprecated_wrapper
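
# Illustrative usage (hypothetical method names): a deprecated method keeps
# working but is flagged with its replacement, e.g.:
#
#     @deprecated("account_balance")
#     def jsonrpc_wallet_balance(self):
#         ...
#
# JSONRPCServerType (below) files such methods under `deprecated_methods`, and
# _get_jsonrpc_method redirects calls to the `new_command` at dispatch time.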


INITIALIZING_CODE = 'initializing'

# TODO: make this consistent with the stages in Downloader.py
DOWNLOAD_METADATA_CODE = 'downloading_metadata'
DOWNLOAD_TIMEOUT_CODE = 'timeout'
DOWNLOAD_RUNNING_CODE = 'running'
DOWNLOAD_STOPPED_CODE = 'stopped'
STREAM_STAGES = [
    (INITIALIZING_CODE, 'Initializing'),
    (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
    (DOWNLOAD_RUNNING_CODE, 'Started %s, got %s/%s blobs, stream status: %s'),
    (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
    (DOWNLOAD_TIMEOUT_CODE, 'Stream timed out')
]

CONNECTION_STATUS_CONNECTED = 'connected'
CONNECTION_STATUS_NETWORK = 'network_connection'
CONNECTION_MESSAGES = {
    CONNECTION_STATUS_CONNECTED: 'No connection problems detected',
    CONNECTION_STATUS_NETWORK: "Your internet connection appears to have been interrupted",
}

SHORT_ID_LEN = 20
MAX_UPDATE_FEE_ESTIMATE = 0.3


def encode_pagination_doc(items):
    return {
        "page": "Page number of the current items.",
        "page_size": "Number of items to show on a page.",
        "total_pages": "Total number of pages.",
        "items": [items],
    }


async def maybe_paginate(get_records: Callable, get_record_count: Callable,
                         page: Optional[int], page_size: Optional[int], **constraints):
    if None not in (page, page_size):
        constraints.update({
            "offset": page_size * (page - 1),
            "limit": page_size
        })
        return {
            "items": await get_records(**constraints),
            "total_pages": int(((await get_record_count(**constraints)) + (page_size - 1)) / page_size),
            "page": page, "page_size": page_size
        }
    return await get_records(**constraints)
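
# Worked example (illustrative): with 25 matching records and page_size=10,
# total_pages = int((25 + 9) / 10) = 3 (ceiling division), and page=2 maps to
# offset=10, limit=10. If either `page` or `page_size` is None, pagination is
# skipped and the raw record list is returned.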


def sort_claim_results(claims):
    claims.sort(key=lambda d: (d['height'], d['name'], d['claim_id'], d['txid'], d['nout']))


DHT_HAS_CONTACTS = "dht_has_contacts"
WALLET_IS_UNLOCKED = "wallet_is_unlocked"


class DHTHasContacts(RequiredCondition):
    name = DHT_HAS_CONTACTS
    component = DHT_COMPONENT
    message = "your node is not connected to the dht"

    @staticmethod
    def evaluate(component):
        return len(component.contacts) > 0


class WalletIsUnlocked(RequiredCondition):
    name = WALLET_IS_UNLOCKED
    component = WALLET_COMPONENT
    message = "your wallet is locked"

    @staticmethod
    def evaluate(component):
        return not component.check_locked()


class JSONRPCError:
    # http://www.jsonrpc.org/specification#error_object
    CODE_PARSE_ERROR = -32700  # Invalid JSON. Error while parsing the JSON text.
    CODE_INVALID_REQUEST = -32600  # The JSON sent is not a valid Request object.
    CODE_METHOD_NOT_FOUND = -32601  # The method does not exist / is not available.
    CODE_INVALID_PARAMS = -32602  # Invalid method parameter(s).
    CODE_INTERNAL_ERROR = -32603  # Internal JSON-RPC error (roughly analogous to an HTTP 500).
    CODE_APPLICATION_ERROR = -32500  # Generic application-level error.
    CODE_AUTHENTICATION_ERROR = -32501  # Authentication failed.

    MESSAGES = {
        CODE_PARSE_ERROR: "Parse Error. Data is not valid JSON.",
        CODE_INVALID_REQUEST: "JSON data is not a valid Request",
        CODE_METHOD_NOT_FOUND: "Method Not Found",
        CODE_INVALID_PARAMS: "Invalid Params",
        CODE_INTERNAL_ERROR: "Internal Error",
        CODE_AUTHENTICATION_ERROR: "Authentication Failed",
    }

    HTTP_CODES = {
        CODE_INVALID_REQUEST: 400,
        CODE_PARSE_ERROR: 400,
        CODE_INVALID_PARAMS: 400,
        CODE_METHOD_NOT_FOUND: 404,
        CODE_INTERNAL_ERROR: 500,
        CODE_APPLICATION_ERROR: 500,
        CODE_AUTHENTICATION_ERROR: 401,
    }

    def __init__(self, message, code=CODE_APPLICATION_ERROR, traceback=None, data=None):
        assert isinstance(code, int), "'code' must be an int"
        assert (data is None or isinstance(data, dict)), "'data' must be None or a dict"
        self.code = code
        if message is None:
            message = self.MESSAGES[code] if code in self.MESSAGES else "API Error"
        self.message = message
        self.data = {} if data is None else data
        self.traceback = []
        if traceback is not None:
            trace_lines = traceback.split("\n")
            for i, t in enumerate(trace_lines):
                if "--- <exception caught here> ---" in t:
                    if len(trace_lines) > i + 1:
                        self.traceback = [j for j in trace_lines[i + 1:] if j]
                        break

    def to_dict(self):
        return {
            'code': self.code,
            'message': self.message,
            'data': self.traceback
        }

    @classmethod
    def create_from_exception(cls, message, code=CODE_APPLICATION_ERROR, traceback=None):
        return cls(message, code=code, traceback=traceback)
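
# Example (illustrative): JSONRPCError("boom").to_dict() produces
# {'code': -32500, 'message': 'boom', 'data': []}; jsonrpc_dumps_pretty
# (below) wraps that dict in a JSON-RPC 2.0 "error" envelope.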


class UnknownAPIMethodError(Exception):
    pass


def jsonrpc_dumps_pretty(obj, **kwargs):
    if isinstance(obj, JSONRPCError):
        data = {"jsonrpc": "2.0", "error": obj.to_dict()}
    else:
        data = {"jsonrpc": "2.0", "result": obj}
    return json.dumps(data, cls=JSONResponseEncoder, sort_keys=True, indent=2, **kwargs) + "\n"
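
# Example (illustrative, assuming JSONResponseEncoder tolerates ledger=None for
# results that need no ledger context): jsonrpc_dumps_pretty({"ok": True},
# ledger=None) would yield roughly
#
#     {
#       "jsonrpc": "2.0",
#       "result": {
#         "ok": true
#       }
#     }
#
# Extra keyword arguments such as `ledger` and `include_protobuf` are forwarded
# by json.dumps to the JSONResponseEncoder constructor.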


def trap(err, *to_trap):
    err.trap(*to_trap)


class JSONRPCServerType(type):
    def __new__(mcs, name, bases, newattrs):
        klass = type.__new__(mcs, name, bases, newattrs)
        klass.callable_methods = {}
        klass.deprecated_methods = {}

        for methodname in dir(klass):
            if methodname.startswith("jsonrpc_"):
                method = getattr(klass, methodname)
                if not hasattr(method, '_deprecated'):
                    klass.callable_methods.update({methodname.split("jsonrpc_")[1]: method})
                else:
                    klass.deprecated_methods.update({methodname.split("jsonrpc_")[1]: method})
        return klass
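
# Sketch of the effect (illustrative): for a class built with this metaclass, a
# method named `jsonrpc_example` is registered as callable_methods["example"],
# while a method carrying the @deprecated marker lands in deprecated_methods
# instead. Registration happens once, at class-creation time.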


class Daemon(metaclass=JSONRPCServerType):
    """
    LBRYnet daemon, a jsonrpc interface to lbry functions
    """

    def __init__(self, conf: Config, component_manager: typing.Optional[ComponentManager] = None):
        self.conf = conf
        self._node_id = None
        self._installation_id = None
        self.session_id = base58.b58encode(utils.generate_id()).decode()
        self.analytics_manager = analytics.AnalyticsManager(conf, self.installation_id, self.session_id)
        self.component_manager = component_manager or ComponentManager(
            conf, analytics_manager=self.analytics_manager,
            skip_components=conf.components_to_skip or []
        )
        self.component_startup_task = None
        # (last check time, connected) -- stored as a tuple to match the annotation
        self._connection_status: typing.Tuple[float, bool] = (self.component_manager.loop.time(), False)
        self.stop_event = asyncio.Event()

        logging.getLogger('aiohttp.access').setLevel(logging.WARN)
        app = web.Application()
        app.router.add_get('/lbryapi', self.handle_old_jsonrpc)
        app.router.add_post('/lbryapi', self.handle_old_jsonrpc)
        app.router.add_get('/streams', self.handle_streams_index)
        app.router.add_get('/get/{claim_name}', self.handle_stream_get_request)
        app.router.add_get('/get/{claim_name}/{claim_id}', self.handle_stream_get_request)
        app.router.add_get('/stream/{sd_hash}', self.handle_stream_range_request)
        app.router.add_post('/', self.handle_old_jsonrpc)
        self.runner = web.AppRunner(app)

    @property
    def dht_node(self) -> typing.Optional['Node']:
        return self.component_manager.get_component(DHT_COMPONENT)

    @property
    def wallet_manager(self) -> typing.Optional['LbryWalletManager']:
        return self.component_manager.get_component(WALLET_COMPONENT)

    @property
    def storage(self) -> typing.Optional['SQLiteStorage']:
        return self.component_manager.get_component(DATABASE_COMPONENT)

    @property
    def stream_manager(self) -> typing.Optional['StreamManager']:
        return self.component_manager.get_component(STREAM_MANAGER_COMPONENT)

    @property
    def exchange_rate_manager(self) -> typing.Optional['ExchangeRateManager']:
        return self.component_manager.get_component(EXCHANGE_RATE_MANAGER_COMPONENT)

    @property
    def blob_manager(self) -> typing.Optional['BlobManager']:
        return self.component_manager.get_component(BLOB_COMPONENT)

    @property
    def upnp(self) -> typing.Optional['UPnPComponent']:
        return self.component_manager.get_component(UPNP_COMPONENT)

    @classmethod
    def get_api_definitions(cls):
        prefix = 'jsonrpc_'
        not_grouped = ['block_show', 'report_bug', 'routing_table_get']
        api = {
            'groups': {
                group_name[:-len('_DOC')].lower(): getattr(cls, group_name).strip()
                for group_name in dir(cls) if group_name.endswith('_DOC')
            },
            'commands': {}
        }
        for jsonrpc_method in dir(cls):
            if jsonrpc_method.startswith(prefix):
                full_name = jsonrpc_method[len(prefix):]
                method = getattr(cls, jsonrpc_method)
                if full_name in not_grouped:
                    name_parts = [full_name]
                else:
                    name_parts = full_name.split('_', 1)
                if len(name_parts) == 1:
                    group = None
                    name, = name_parts
                elif len(name_parts) == 2:
                    group, name = name_parts
                    assert group in api['groups'], \
                        f"Group {group} does not have doc string for command {full_name}."
                else:
                    raise NameError(f'Could not parse method name: {jsonrpc_method}')
                api['commands'][full_name] = {
                    'api_method_name': full_name,
                    'name': name,
                    'group': group,
                    'doc': method.__doc__,
                    'method': method,
                }
                if hasattr(method, '_deprecated'):
                    api['commands'][full_name]['replaced_by'] = method.new_command

        for command in api['commands'].values():
            if 'replaced_by' in command:
                command['replaced_by'] = api['commands'][command['replaced_by']]

        return api
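
    # Example shape (illustrative): for the `account_balance` command defined
    # below, this yields api['commands']['account_balance'] == {
    #     'api_method_name': 'account_balance', 'name': 'balance',
    #     'group': 'account', 'doc': <docstring>, 'method': <function>,
    # }, with the 'account' group description taken from ACCOUNT_DOC.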

    @property
    def db_revision_file_path(self):
        return os.path.join(self.conf.data_dir, 'db_revision')

    @property
    def installation_id(self):
        install_id_filename = os.path.join(self.conf.data_dir, "install_id")
        if not self._installation_id:
            if os.path.isfile(install_id_filename):
                with open(install_id_filename, "r") as install_id_file:
                    self._installation_id = str(install_id_file.read()).strip()
        if not self._installation_id:
            self._installation_id = base58.b58encode(utils.generate_id()).decode()
            with open(install_id_filename, "w") as install_id_file:
                install_id_file.write(self._installation_id)
        return self._installation_id

    def ensure_data_dir(self):
        if not os.path.isdir(self.conf.data_dir):
            os.makedirs(self.conf.data_dir)
        if not os.path.isdir(os.path.join(self.conf.data_dir, "blobfiles")):
            os.makedirs(os.path.join(self.conf.data_dir, "blobfiles"))
        return self.conf.data_dir

    def ensure_wallet_dir(self):
        if not os.path.isdir(self.conf.wallet_dir):
            os.makedirs(self.conf.wallet_dir)

    def ensure_download_dir(self):
        if not os.path.isdir(self.conf.download_dir):
            os.makedirs(self.conf.download_dir)

    async def update_connection_status(self):
        connected = await utils.async_check_connection()
        self._connection_status = (self.component_manager.loop.time(), connected)

    async def get_connection_status(self) -> str:
        if self._connection_status[0] + 300 > self.component_manager.loop.time():
            if not self._connection_status[1]:
                await self.update_connection_status()
        else:
            await self.update_connection_status()
        return CONNECTION_STATUS_CONNECTED if self._connection_status[1] else CONNECTION_STATUS_NETWORK
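
    # Behavior note: the cached result is trusted for up to 300 seconds, but a
    # cached "not connected" result is always re-probed, so recovery is noticed
    # promptly while a healthy connection avoids a network check on every call.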

    async def start(self):
        log.info("Starting LBRYNet Daemon")
        log.debug("Settings: %s", json.dumps(self.conf.settings_dict, indent=2))
        log.info("Platform: %s", json.dumps(system_info.get_platform(), indent=2))
        await self.analytics_manager.send_server_startup()
        await self.runner.setup()

        try:
            site = web.TCPSite(self.runner, self.conf.api_host, self.conf.api_port)
            await site.start()
            log.info('lbrynet API listening on TCP %s:%i', *site._server.sockets[0].getsockname()[:2])
        except OSError as e:
            log.error('lbrynet API failed to bind TCP %s for listening. Daemon is already running or this port is '
                      'already in use by another application.', self.conf.api)
            await self.analytics_manager.send_server_startup_error(str(e))
            raise SystemExit()

        try:
            await self.initialize()
        except asyncio.CancelledError:
            log.info("shutting down before finished starting")
            await self.analytics_manager.send_server_startup_error("shutting down before finished starting")
            await self.stop()
        except Exception as e:
            await self.analytics_manager.send_server_startup_error(str(e))
            log.exception('Failed to start lbrynet-daemon')

        await self.analytics_manager.send_server_startup_success()

    async def initialize(self):
        self.ensure_data_dir()
        self.ensure_wallet_dir()
        self.ensure_download_dir()
        if not self.analytics_manager.is_started:
            await self.analytics_manager.start()
        self.component_startup_task = asyncio.create_task(self.component_manager.start())
        await self.component_startup_task

    async def stop(self):
        if self.component_startup_task is not None:
            if self.component_startup_task.done():
                await self.component_manager.stop()
            else:
                self.component_startup_task.cancel()
        await self.runner.cleanup()
        if self.analytics_manager.is_started:
            self.analytics_manager.stop()

    async def handle_old_jsonrpc(self, request):
        data = await request.json()
        include_protobuf = data.get('params', {}).pop('include_protobuf', False)
        result = await self._process_rpc_call(data)
        ledger = None
        if 'wallet' in self.component_manager.get_components_status():
            # self.ledger only available if wallet component is not skipped
            ledger = self.ledger
        return web.Response(
            text=jsonrpc_dumps_pretty(result, ledger=ledger, include_protobuf=include_protobuf),
            content_type='application/json'
        )
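
    # Example request (illustrative; host/port depend on the daemon's `api`
    # setting, conventionally localhost:5279):
    #
    #     curl -X POST -d '{"method": "status", "params": {}}' http://localhost:5279/
    #
    # The optional `include_protobuf` flag is popped out of `params` before
    # dispatch so individual API methods never see it.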

    async def handle_streams_index(self, request: web.Request):
        return web.Response(
            body="<ul>" + "".join([
                f'<li><a href="/stream/{sd_hash}">lbry://{stream.claim_name}</a></li>'
                for sd_hash, stream in self.stream_manager.streams.items()
            ]) + "</ul>",
            content_type='text/html'
        )

    async def handle_stream_get_request(self, request: web.Request):
        name_and_claim_id = request.path.split("/get/")[1]
        if "/" not in name_and_claim_id:
            uri = f"lbry://{name_and_claim_id}"
        else:
            name, claim_id = name_and_claim_id.split("/")
            uri = f"lbry://{name}#{claim_id}"
        stream = await self.jsonrpc_get(uri)
        return web.HTTPFound(f"/stream/{stream.sd_hash}")

    async def handle_stream_range_request(self, request: web.Request):
        sd_hash = request.path.split("/stream/")[1]
        if sd_hash not in self.stream_manager.streams:
            return web.HTTPNotFound()
        stream = self.stream_manager.streams[sd_hash]

        get_range = request.headers.get('range', 'bytes=0-')
        if '=' in get_range:
            get_range = get_range.split('=')[1]
        start, end = get_range.split('-')
        size = 0
        await self.stream_manager.start_stream(stream)
        for blob in stream.descriptor.blobs[:-1]:
            size += 2097152 - 1 if blob.length == 2097152 else blob.length
        size -= 15  # last padding is unguessable

        start = int(start)
        end = int(end) if end else size - 1
        skip_blobs = start // 2097150
        skip = skip_blobs * 2097151
        start = skip
        final_size = end - start + 1

        headers = {
            'Accept-Ranges': 'bytes',
            'Content-Range': f'bytes {start}-{end}/{size}',
            'Content-Length': str(final_size),
            'Content-Type': stream.mime_type
        }

        if stream.delayed_stop:
            stream.delayed_stop.cancel()
        response = web.StreamResponse(
            status=206,
            headers=headers
        )
        await response.prepare(request)
        async for blob_info, decrypted in stream.aiter_read_stream(skip_blobs):
            await response.write(decrypted)
            log.info("sent browser blob %i/%i", blob_info.blob_num + 1, len(stream.descriptor.blobs) - 1)
        return response
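
    # Note on the constants above (an interpretation, not authoritative): LBRY
    # blobs are at most 2097152 bytes (2 MiB); the code treats each full blob
    # as carrying one byte less of payload, seeks in strides derived from
    # 2097150/2097151 bytes, and subtracts a fixed 15 bytes because the final
    # blob's padding length cannot be known before it is downloaded.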

    async def _process_rpc_call(self, data):
        args = data.get('params', {})

        try:
            function_name = data['method']
        except KeyError:
            return JSONRPCError(
                "Missing 'method' value in request.", JSONRPCError.CODE_METHOD_NOT_FOUND
            )

        try:
            fn = self._get_jsonrpc_method(function_name)
        except UnknownAPIMethodError:
            return JSONRPCError(
                f"Invalid method requested: {function_name}.", JSONRPCError.CODE_METHOD_NOT_FOUND
            )

        if args in ([{}], []):
            _args, _kwargs = (), {}
        elif isinstance(args, dict):
            _args, _kwargs = (), args
        elif len(args) == 1 and isinstance(args[0], dict):
            # TODO: this is for backwards compatibility. Remove this once API and UI are updated
            # TODO: also delete EMPTY_PARAMS then
            _args, _kwargs = (), args[0]
        elif len(args) == 2 and isinstance(args[0], list) and isinstance(args[1], dict):
            _args, _kwargs = args
        else:
            return JSONRPCError(
                "Invalid parameters format.", JSONRPCError.CODE_INVALID_PARAMS
            )

        params_error, erroneous_params = self._check_params(fn, _args, _kwargs)
        if params_error is not None:
            params_error_message = '{} for {} command: {}'.format(
                params_error, function_name, ', '.join(erroneous_params)
            )
            log.warning(params_error_message)
            return JSONRPCError(
                params_error_message, JSONRPCError.CODE_INVALID_PARAMS
            )

        try:
            result = fn(self, *_args, **_kwargs)
            if asyncio.iscoroutine(result):
                result = await result
            return result
        except asyncio.CancelledError:
            log.info("cancelled API call for: %s", function_name)
            raise
        except Exception as e:  # pylint: disable=broad-except
            log.exception("error handling api request")
            return JSONRPCError(
                str(e), JSONRPCError.CODE_APPLICATION_ERROR, format_exc()
            )

    def _verify_method_is_callable(self, function_path):
        if function_path not in self.callable_methods:
            raise UnknownAPIMethodError(function_path)

    def _get_jsonrpc_method(self, function_path):
        if function_path in self.deprecated_methods:
            new_command = self.deprecated_methods[function_path].new_command
            log.warning('API function "%s" is deprecated, please update to use "%s"',
                        function_path, new_command)
            function_path = new_command
        self._verify_method_is_callable(function_path)
        return self.callable_methods.get(function_path)

    @staticmethod
    def _check_params(function, args_tup, args_dict):
        argspec = inspect.getfullargspec(undecorated(function))
        num_optional_params = 0 if argspec.defaults is None else len(argspec.defaults)

        duplicate_params = [
            duplicate_param
            for duplicate_param in argspec.args[1:len(args_tup) + 1]
            if duplicate_param in args_dict
        ]

        if duplicate_params:
            return 'Duplicate parameters', duplicate_params

        # compute the slice end explicitly: a "[x:-0]" slice would be empty when
        # there are no optional params, hiding genuinely missing arguments
        first_optional_index = len(argspec.args) - num_optional_params
        missing_required_params = [
            required_param
            for required_param in argspec.args[len(args_tup) + 1:first_optional_index]
            if required_param not in args_dict
        ]
        if missing_required_params:
            return 'Missing required parameters', missing_required_params

        extraneous_params = [] if argspec.varkw is not None else [
            extra_param
            for extra_param in args_dict
            if extra_param not in argspec.args[1:]
        ]
        if extraneous_params:
            return 'Extraneous parameters', extraneous_params

        return None, None
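
    # Example (illustrative): for `def jsonrpc_get(self, uri, file_name=None,
    # timeout=None, save_file=None)`, calling _check_params with args_tup=()
    # and args_dict={'file_name': 'x'} returns
    # ('Missing required parameters', ['uri']), while args_dict={'uri': 'u',
    # 'bogus': 1} returns ('Extraneous parameters', ['bogus']).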

    @property
    def default_wallet(self):
        try:
            return self.wallet_manager.default_wallet
        except AttributeError:
            return None

    @property
    def default_account(self) -> Optional[LBCAccount]:
        try:
            return self.wallet_manager.default_account
        except AttributeError:
            return None

    @property
    def ledger(self) -> Optional['MainNetLedger']:
        try:
            return self.wallet_manager.default_account.ledger
        except AttributeError:
            return None

    async def get_est_cost_from_uri(self, uri: str) -> typing.Optional[float]:
        """
        Resolve a name and return the estimated stream cost
        """

        resolved = await self.resolve(uri)
        if resolved:
            claim_response = resolved[uri]
        else:
            claim_response = None

        if claim_response and 'claim' in claim_response:
            if 'value' in claim_response['claim'] and claim_response['claim']['value'] is not None:
                claim_value = Claim.from_bytes(claim_response['claim']['value'])
                if not claim_value.stream.has_fee:
                    return 0.0
                return round(
                    self.exchange_rate_manager.convert_currency(
                        claim_value.stream.fee.currency, "LBC", claim_value.stream.fee.amount
                    ), 5
                )
        else:
            log.warning("Failed to estimate cost for %s", uri)

    ############################################################################
    #                                                                          #
    #                    JSON-RPC API methods start here                       #
    #                                                                          #
    ############################################################################

    def jsonrpc_stop(self):
        """
        Stop lbrynet API server.

        Usage:
            stop

        Options:
            None

        Returns:
            (string) Shutdown message
        """
        log.info("Shutting down lbrynet daemon")
        self.stop_event.set()
        return "Shutting down"

    async def jsonrpc_status(self):
        """
        Get daemon status

        Usage:
            status

        Options:
            None

        Returns:
            (dict) lbrynet-daemon status
            {
                'installation_id': (str) installation id - base58,
                'is_running': (bool),
                'skipped_components': (list) [names of skipped components (str)],
                'startup_status': { Does not include components which have been skipped
                    'blob_manager': (bool),
                    'blockchain_headers': (bool),
                    'database': (bool),
                    'dht': (bool),
                    'exchange_rate_manager': (bool),
                    'hash_announcer': (bool),
                    'peer_protocol_server': (bool),
                    'stream_manager': (bool),
                    'upnp': (bool),
                    'wallet': (bool),
                },
                'connection_status': {
                    'code': (str) connection status code,
                    'message': (str) connection status message
                },
                'blockchain_headers': {
                    'downloading_headers': (bool),
                    'download_progress': (float) 0-100.0
                },
                'wallet': {
                    'blocks': (int) local blockchain height,
                    'blocks_behind': (int) remote_height - local_height,
                    'best_blockhash': (str) block hash of most recent block,
                    'is_encrypted': (bool),
                    'is_locked': (bool),
                },
                'dht': {
                    'node_id': (str) lbry dht node id - hex encoded,
                    'peers_in_routing_table': (int) the number of peers in the routing table,
                },
                'blob_manager': {
                    'finished_blobs': (int) number of finished blobs in the blob manager,
                },
                'hash_announcer': {
                    'announce_queue_size': (int) number of blobs currently queued to be announced
                },
                'stream_manager': {
                    'managed_files': (int) count of files in the stream manager,
                },
                'upnp': {
                    'aioupnp_version': (str),
                    'redirects': {
                        <TCP | UDP>: (int) external_port,
                    },
                    'gateway': (str) manufacturer and model,
                    'dht_redirect_set': (bool),
                    'peer_redirect_set': (bool),
                    'external_ip': (str) external ip address,
                }
            }
        """

        connection_code = await self.get_connection_status()

        response = {
            'installation_id': self.installation_id,
            'is_running': all(self.component_manager.get_components_status().values()),
            'skipped_components': self.component_manager.skip_components,
            'startup_status': self.component_manager.get_components_status(),
            'connection_status': {
                'code': connection_code,
                'message': CONNECTION_MESSAGES[connection_code],
            },
        }
        for component in self.component_manager.components:
            status = await component.get_status()
            if status:
                response[component.component_name] = status
        return response

    def jsonrpc_version(self):
        """
        Get lbrynet API server version information

        Usage:
            version

        Options:
            None

        Returns:
            (dict) Dictionary of lbry version information
            {
                'build': (str) build type (e.g. "dev", "rc", "release"),
                'ip': (str) remote ip, if available,
                'lbrynet_version': (str) lbrynet_version,
                'lbryum_version': (str) lbryum_version,
                'lbryschema_version': (str) lbryschema_version,
                'os_release': (str) os release string,
                'os_system': (str) os name,
                'platform': (str) platform string,
                'processor': (str) processor type,
                'python_version': (str) python version,
            }
        """
        platform_info = system_info.get_platform()
        log.info("Get version info: %s", json.dumps(platform_info))
        return platform_info

    @requires(WALLET_COMPONENT)
    async def jsonrpc_resolve(self, urls: typing.Union[str, list]):
        """
        Get the claim that a URL refers to.

        Usage:
            resolve <urls>...

        Options:
            --urls=<urls>   : (str, list) one or more urls to resolve

        Returns:
            Dictionary of results, keyed by url
            '<url>': {
                    If a resolution error occurs:
                    'error': Error message

                    If the url resolves to a channel or a claim in a channel:
                    'certificate': {
                        'address': (str) claim address,
                        'amount': (float) claim amount,
                        'effective_amount': (float) claim amount including supports,
                        'claim_id': (str) claim id,
                        'claim_sequence': (int) claim sequence number (or -1 if unknown),
                        'decoded_claim': (bool) whether or not the claim value was decoded,
                        'height': (int) claim height,
                        'depth': (int) claim depth,
                        'has_signature': (bool) included if decoded_claim,
                        'name': (str) claim name,
                        'permanent_url': (str) permanent url of the certificate claim,
                        'supports': (list) list of supports [{'txid': (str) txid,
                                                              'nout': (int) nout,
                                                              'amount': (float) amount}],
                        'txid': (str) claim txid,
                        'nout': (str) claim nout,
                        'signature_is_valid': (bool), included if has_signature,
                        'value': ClaimDict if decoded, otherwise hex string
                    }

                    If the url resolves to a channel:
                    'claims_in_channel': (int) number of claims in the channel,

                    If the url resolves to a claim:
                    'claim': {
                        'address': (str) claim address,
                        'amount': (float) claim amount,
                        'effective_amount': (float) claim amount including supports,
                        'claim_id': (str) claim id,
                        'claim_sequence': (int) claim sequence number (or -1 if unknown),
                        'decoded_claim': (bool) whether or not the claim value was decoded,
                        'height': (int) claim height,
                        'depth': (int) claim depth,
                        'has_signature': (bool) included if decoded_claim,
                        'name': (str) claim name,
                        'permanent_url': (str) permanent url of the claim,
                        'channel_name': (str) channel name if claim is in a channel,
                        'supports': (list) list of supports [{'txid': (str) txid,
                                                              'nout': (int) nout,
                                                              'amount': (float) amount}],
                        'txid': (str) claim txid,
                        'nout': (str) claim nout,
                        'signature_is_valid': (bool), included if has_signature,
                        'value': ClaimDict if decoded, otherwise hex string
                    }
            }
        """

        if isinstance(urls, str):
            urls = [urls]

        results = {}

        valid_urls = set()
        for u in urls:
            try:
                parse_lbry_uri(u)
                valid_urls.add(u)
            except URIParseError:
                results[u] = {"error": "%s is not a valid url" % u}

        resolved = await self.resolve(*tuple(valid_urls))

        for resolved_uri in resolved:
            results[resolved_uri] = resolved[resolved_uri]

        return results

    @requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
              STREAM_MANAGER_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_get(self, uri, file_name=None, timeout=None, save_file=None):
        """
        Download stream from a LBRY name.

        Usage:
            get <uri> [<file_name> | --file_name=<file_name>] [<timeout> | --timeout=<timeout>] [--save_file]

        Options:
            --uri=<uri>              : (str) uri of the content to download
            --file_name=<file_name>  : (str) specified name for the downloaded file, overrides the stream file name
            --timeout=<timeout>      : (int) download timeout in number of seconds
            --save_file              : (bool) save the file to the downloads directory

        Returns: {File}
        """
        save_file = save_file if save_file is not None else not self.conf.streaming_only
        try:
            stream = await self.stream_manager.download_stream_from_uri(
                uri, self.exchange_rate_manager, timeout, file_name, save_file=save_file
            )
            if not stream:
                raise DownloadSDTimeout(uri)
        except Exception as e:
            log.warning("Error downloading %s: %s", uri, str(e))
            return {"error": str(e)}
        return stream

    SETTINGS_DOC = """
    Settings management.
    """

    def jsonrpc_settings_get(self):
        """
        Get daemon settings

        Usage:
            settings_get

        Options:
            None

        Returns:
            (dict) Dictionary of daemon settings
            See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings
        """
        return self.conf.settings_dict

    def jsonrpc_settings_set(self, key, value):
        """
        Set daemon settings

        Usage:
            settings_set (<key>) (<value>)

        Options:
            None

        Returns:
            (dict) Updated dictionary of daemon settings
        """
        with self.conf.update_config() as c:
            attr: Setting = getattr(type(c), key)
            cleaned = attr.deserialize(value)
            setattr(c, key, cleaned)
        return {key: cleaned}
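
    # Example (illustrative, assuming `download_dir` is a registered Setting):
    # settings_set("download_dir", "/tmp/lbry") deserializes the raw value via
    # the Setting descriptor so it is typed and validated, persists it through
    # the update_config() context manager, and returns {"download_dir": "/tmp/lbry"}.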

    ACCOUNT_DOC = """
    Create, modify and inspect wallet accounts.
    """

    @requires("wallet")
    def jsonrpc_account_list(self, account_id=None, confirmations=6,
                             include_claims=False, show_seed=False):
        """
        List details of all of the accounts or a specific account.

        Usage:
            account_list [<account_id>] [--confirmations=<confirmations>]
                         [--include_claims] [--show_seed]

        Options:
            --account_id=<account_id>       : (str) If provided only the balance for this
                                              account will be given
            --confirmations=<confirmations> : (int) required confirmations (default: 6)
            --include_claims                : (bool) include claims, requires that an
                                              LBC account is specified (default: false)
            --show_seed                     : (bool) show the seed for the account

        Returns: {List[Account]}
        """
        kwargs = {
            'confirmations': confirmations,
            'show_seed': show_seed
        }
        if account_id:
            return self.get_account_or_error(account_id).get_details(**kwargs)
        else:
            return self.wallet_manager.get_detailed_accounts(**kwargs)

    @requires("wallet")
    async def jsonrpc_account_balance(self, account_id=None, confirmations=0):
        """
        Return the balance of an account

        Usage:
            account_balance [<account_id>] [--confirmations=<confirmations>]

        Options:
            --account_id=<account_id>       : (str) If provided only the balance for this
                                              account will be given. Otherwise default account.
            --confirmations=<confirmations> : (int) Only include transactions with this many
                                              confirmed blocks.

        Returns:
            (decimal) amount of lbry credits in wallet
        """
        account = self.get_account_or_default(account_id)
        dewies = await account.get_balance(confirmations=confirmations)
        return dewies_to_lbc(dewies)
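
    # Note: balances are tracked internally in "dewies", the smallest unit of
    # LBC (10^8 dewies per LBC, analogous to satoshis); dewies_to_lbc renders
    # that integer as a decimal LBC string for the API response.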

    @requires("wallet")
    async def jsonrpc_account_add(
            self, account_name, single_key=False, seed=None, private_key=None, public_key=None):
        """
        Add a previously created account from a seed, private key or public key (read-only).
        Specify --single_key for single address or vanity address accounts.

        Usage:
            account_add (<account_name> | --account_name=<account_name>)
                        (--seed=<seed> | --private_key=<private_key> | --public_key=<public_key>)
                        [--single_key]

        Options:
            --account_name=<account_name>  : (str) name of the account to add
            --seed=<seed>                  : (str) seed to generate new account from
            --private_key=<private_key>    : (str) private key for new account
            --public_key=<public_key>      : (str) public key for new account
            --single_key                   : (bool) create single key account, default is multi-key

        Returns: {Account}
        """
        account = LBCAccount.from_dict(
            self.ledger, self.default_wallet, {
                'name': account_name,
                'seed': seed,
                'private_key': private_key,
                'public_key': public_key,
                'address_generator': {
                    'name': SingleKey.name if single_key else HierarchicalDeterministic.name
                }
            }
        )

        if self.ledger.network.is_connected:
            await self.ledger.subscribe_account(account)

        self.default_wallet.save()

        return account

    @requires("wallet")
    async def jsonrpc_account_create(self, account_name, single_key=False):
        """
        Create a new account. Specify --single_key if you want to use
        the same address for all transactions (not recommended).

        Usage:
            account_create (<account_name> | --account_name=<account_name>) [--single_key]

        Options:
            --account_name=<account_name>  : (str) name of the account to create
            --single_key                   : (bool) create single key account, default is multi-key

        Returns: {Account}
        """
        account = LBCAccount.generate(
            self.ledger, self.default_wallet, account_name, {
                'name': SingleKey.name if single_key else HierarchicalDeterministic.name
            }
        )

        if self.ledger.network.is_connected:
            await self.ledger.subscribe_account(account)

        self.default_wallet.save()

        return account

    @requires("wallet")
    def jsonrpc_account_remove(self, account_id):
        """
        Remove an existing account.

        Usage:
            account_remove (<account_id> | --account_id=<account_id>)

        Options:
            --account_id=<account_id>  : (str) id of the account to remove

        Returns: {Account}
        """
        account = self.get_account_or_error(account_id)
        self.default_wallet.accounts.remove(account)
        self.default_wallet.save()
        return account

    @requires("wallet")
    def jsonrpc_account_set(
            self, account_id, default=False, new_name=None,
            change_gap=None, change_max_uses=None, receiving_gap=None, receiving_max_uses=None):
        """
        Change various settings on an account.

        Usage:
            account_set (<account_id> | --account_id=<account_id>)
                        [--default] [--new_name=<new_name>]
                        [--change_gap=<change_gap>] [--change_max_uses=<change_max_uses>]
                        [--receiving_gap=<receiving_gap>] [--receiving_max_uses=<receiving_max_uses>]

        Options:
            --account_id=<account_id>                   : (str) id of the account to change
            --default                                   : (bool) make this account the default
            --new_name=<new_name>                       : (str) new name for the account
            --receiving_gap=<receiving_gap>             : (int) set the gap for receiving addresses
            --receiving_max_uses=<receiving_max_uses>   : (int) set the maximum number of times to
                                                          use a receiving address
            --change_gap=<change_gap>                   : (int) set the gap for change addresses
            --change_max_uses=<change_max_uses>         : (int) set the maximum number of times to
                                                          use a change address

        Returns: {Account}
        """
        account = self.get_account_or_error(account_id)
        change_made = False

        if account.receiving.name == HierarchicalDeterministic.name:
            address_changes = {
                'change': {'gap': change_gap, 'maximum_uses_per_address': change_max_uses},
                'receiving': {'gap': receiving_gap, 'maximum_uses_per_address': receiving_max_uses},
            }
            for chain_name in address_changes:
                chain = getattr(account, chain_name)
                for attr, value in address_changes[chain_name].items():
                    if value is not None:
                        setattr(chain, attr, value)
                        change_made = True

        if new_name is not None:
            account.name = new_name
            change_made = True

        if default:
            self.default_wallet.accounts.remove(account)
            self.default_wallet.accounts.insert(0, account)
            change_made = True

        if change_made:
            account.modified_on = time.time()
            self.default_wallet.save()

        return account

    @requires(WALLET_COMPONENT)
    def jsonrpc_account_unlock(self, password, account_id=None):
        """
        Unlock an encrypted account

        Usage:
            account_unlock (<password> | --password=<password>) [<account_id> | --account_id=<account_id>]

        Options:
            --password=<password>      : (str) password to use for unlocking
            --account_id=<account_id>  : (str) id for the account to unlock, unlocks default account
                                         if not provided

        Returns:
            (bool) true if account is unlocked, otherwise false
        """

        return self.wallet_manager.unlock_account(
            password, self.get_account_or_default(account_id, lbc_only=False)
        )

    @requires(WALLET_COMPONENT)
    def jsonrpc_account_lock(self, account_id=None):
        """
        Lock an unlocked account

        Usage:
            account_lock [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id>  : (str) id for the account to lock

        Returns:
            (bool) true if account is locked, otherwise false
        """

        return self.wallet_manager.lock_account(self.get_account_or_default(account_id, lbc_only=False))

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    def jsonrpc_account_decrypt(self, account_id=None):
        """
        Decrypt an encrypted account; this will remove the wallet password.
        The account must be unlocked to decrypt it.

        Usage:
            account_decrypt [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id>  : (str) id for the account to decrypt

        Returns:
            (bool) true if wallet is decrypted, otherwise false
        """

        return self.wallet_manager.decrypt_account(self.get_account_or_default(account_id, lbc_only=False))
|
2017-12-17 07:00:12 +01:00
|
|
|
|
    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    def jsonrpc_account_encrypt(self, new_password, account_id=None):
        """
        Encrypt an unencrypted account with a password

        Usage:
            account_encrypt (<new_password> | --new_password=<new_password>)
                            [<account_id> | --account_id=<account_id>]

        Options:
            --new_password=<new_password>  : (str) password to encrypt account
            --account_id=<account_id>      : (str) id for the account to encrypt, encrypts
                                             default account if not provided

        Returns:
            (bool) true if wallet is encrypted, otherwise false
        """
        return self.wallet_manager.encrypt_account(
            new_password,
            self.get_account_or_default(account_id, lbc_only=False)
        )
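
    # Sketch of the full encryption lifecycle using the hypothetical `daemon_call`
    # helper from the comment after jsonrpc_account_lock (all values are
    # placeholders):
    #
    #   daemon_call('account_encrypt', new_password='s3cret')  # password now required
    #   daemon_call('account_lock')                            # lock the account
    #   daemon_call('account_unlock', password='s3cret')       # unlock before use
    #   daemon_call('account_decrypt')                         # remove the password again
    #
    # Note that both encrypt and decrypt require WALLET_IS_UNLOCKED, so a locked
    # account must be unlocked first.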
    @requires("wallet")
    def jsonrpc_account_max_address_gap(self, account_id):
        """
        Finds ranges of consecutive addresses that are unused and returns the length
        of the longest such range: for change and receiving address chains. This is
        useful to figure out ideal values to set for 'receiving_gap' and 'change_gap'
        account settings.

        Usage:
            account_max_address_gap (<account_id> | --account_id=<account_id>)

        Options:
            --account_id=<account_id>  : (str) account for which to get max gaps

        Returns:
            (map) maximum gap for change and receiving addresses
        """
        return self.get_account_or_error(account_id).get_max_gap()
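
    # Sketch of feeding the measured gaps back into the account settings via the
    # hypothetical `daemon_call` helper. The account id and the exact keys of the
    # returned map ('max_receiving_gap', 'max_change_gap') are assumptions based
    # on the change/receiving chains described above:
    #
    #   gaps = daemon_call('account_max_address_gap', account_id='abc123')
    #   daemon_call('account_set', account_id='abc123',
    #               receiving_gap=max(20, gaps['max_receiving_gap']),
    #               change_gap=max(6, gaps['max_change_gap']))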
    @requires("wallet")
    def jsonrpc_account_fund(self, to_account=None, from_account=None, amount='0.0',
                             everything=False, outputs=1, broadcast=False):
        """
        Transfer some amount (or --everything) to an account from another
        account (can be the same account). Amounts are interpreted as LBC.
        You can also spread the transfer across a number of --outputs (cannot
        be used together with --everything).

        Usage:
            account_fund [<to_account> | --to_account=<to_account>]
                         [<from_account> | --from_account=<from_account>]
                         (<amount> | --amount=<amount> | --everything)
                         [<outputs> | --outputs=<outputs>]
                         [--broadcast]

        Options:
            --to_account=<to_account>      : (str) send to this account
            --from_account=<from_account>  : (str) spend from this account
            --amount=<amount>              : (str) the amount of LBC to transfer
            --everything                   : (bool) transfer everything (excluding claims), default: false.
            --outputs=<outputs>            : (int) split payment across many outputs, default: 1.
            --broadcast                    : (bool) actually broadcast the transaction, default: false.

        Returns: {Transaction}
        """
        to_account = self.get_account_or_default(to_account, 'to_account')
        from_account = self.get_account_or_default(from_account, 'from_account')
        amount = self.get_dewies_or_error('amount', amount) if amount else None
        if not isinstance(outputs, int):
            raise ValueError("--outputs must be an integer.")
        if everything and outputs > 1:
            raise ValueError("Using --everything along with --outputs is not supported.")
        return from_account.fund(
            to_account=to_account, amount=amount, everything=everything,
            outputs=outputs, broadcast=broadcast
        )
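
    # Sketch: previewing a sweep of an account's funds, then splitting 5 LBC of
    # the same account across 10 outputs and actually broadcasting (hypothetical
    # `daemon_call` helper, placeholder account id). Because --broadcast defaults
    # to false, the first call is a dry run:
    #
    #   tx = daemon_call('account_fund', to_account='abc123',
    #                    from_account='abc123', everything=True)
    #   tx = daemon_call('account_fund', to_account='abc123',
    #                    from_account='abc123', amount='5.0',
    #                    outputs=10, broadcast=True)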
    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_account_send(self, amount, addresses, account_id=None, preview=False):
        """
        Send the same number of credits to multiple addresses.

        Usage:
            account_send <amount> <addresses>... [--account_id=<account_id>] [--preview]

        Options:
            --account_id=<account_id>  : (str) account to fund the transaction
            --preview                  : (bool) do not broadcast the transaction

        Returns: {Transaction}
        """
        account = self.get_account_or_default(account_id)

        amount = self.get_dewies_or_error("amount", amount)
        if not amount:
            raise NullFundsError
        if amount < 0:
            raise NegativeFundsError()

        if addresses and not isinstance(addresses, list):
            addresses = [addresses]

        outputs = []
        for address in addresses:
            self.valid_address_or_error(address)
            outputs.append(
                Output.pay_pubkey_hash(
                    amount, self.ledger.address_to_hash160(address)
                )
            )

        tx = await Transaction.create(
            [], outputs, [account], account
        )

        if not preview:
            await self.ledger.broadcast(tx)
            await self.analytics_manager.send_credits_sent()
        else:
            await account.ledger.release_tx(tx)

        return tx
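
    # Sketch: the amount is applied per address, so this sends 1.0 LBC to each of
    # two addresses (2.0 LBC total, plus fee). The addresses are placeholders and
    # the call uses the hypothetical `daemon_call` helper:
    #
    #   daemon_call('account_send', amount='1.0',
    #               addresses=['bXa1...', 'bYb2...'], preview=True)
    #
    # Amounts are parsed into dewies (presumably via lbc_to_dewies), so '1.0' LBC
    # becomes 100_000_000 dewies internally.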
    SYNC_DOC = """
    Wallet synchronization.
    """
    @requires("wallet")
    def jsonrpc_sync_hash(self):
        """
        Deterministic hash of the wallet.

        Usage:
            sync_hash

        Options:

        Returns:
            (str) sha256 hash of wallet
        """
        return hexlify(self.default_wallet.hash).decode()
    @requires("wallet")
    def jsonrpc_sync_apply(self, password, data=None, encrypt_password=None):
        """
        Apply incoming synchronization data, if provided, and then produce a sync hash and
        an encrypted wallet.

        Usage:
            sync_apply <password> [--data=<data>] [--encrypt-password=<encrypt_password>]

        Options:
            --password=<password>                  : (str) password to decrypt incoming and encrypt outgoing data
            --data=<data>                          : (str) incoming sync data, if any
            --encrypt-password=<encrypt_password>  : (str) password to encrypt outgoing data if different
                                                     from the decrypt password, used during password changes

        Returns:
            (map) sync hash and data
        """
        if data is not None:
            decrypted_data = Wallet.unpack(password, data)
            for account_data in decrypted_data['accounts']:
                _, _, pubkey = LBCAccount.keys_from_dict(self.ledger, account_data)
                account_id = pubkey.address
                local_match = None
                for local_account in self.default_wallet.accounts:
                    if account_id == local_account.id:
                        local_match = local_account
                        break
                if local_match is not None:
                    local_match.apply(account_data)
                else:
                    new_account = LBCAccount.from_dict(self.ledger, self.default_wallet, account_data)
                    if self.ledger.network.is_connected:
                        asyncio.create_task(self.ledger.subscribe_account(new_account))
            self.default_wallet.save()

        encrypted = self.default_wallet.pack(encrypt_password or password)
        return {
            'hash': self.jsonrpc_sync_hash(),
            'data': encrypted.decode()
        }
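
    # Sketch of a two-device sync roundtrip with the hypothetical `daemon_call`
    # helper; `blob` stands in for whatever transport carries the payload:
    #
    #   out = daemon_call('sync_apply', password='s3cret')        # device A: export
    #   blob = out['data']                                        # ship out-of-band
    #   daemon_call('sync_apply', password='s3cret', data=blob)   # device B: import
    #
    # Comparing sync_hash on both devices afterwards is a cheap way to confirm
    # the wallets converged.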
    ADDRESS_DOC = """
    List, generate and verify addresses.
    """
    @requires(WALLET_COMPONENT)
    async def jsonrpc_address_is_mine(self, address, account_id=None):
        """
        Checks if an address is associated with the current wallet.

        Usage:
            address_is_mine (<address> | --address=<address>)
                            [<account_id> | --account_id=<account_id>]

        Options:
            --address=<address>        : (str) address to check
            --account_id=<account_id>  : (str) id of the account to use

        Returns:
            (bool) true if address is associated with current wallet
        """
        account = self.get_account_or_default(account_id)
        match = await self.ledger.db.get_address(address=address, account=account)
        if match is not None:
            return True
        return False
    @requires(WALLET_COMPONENT)
    def jsonrpc_address_list(self, account_id=None, page=None, page_size=None):
        """
        List account addresses

        Usage:
            address_list [<account_id> | --account_id=<account_id>]
                         [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id>  : (str) id of the account to use
            --page=<page>              : (int) page to return during pagination
            --page_size=<page_size>    : (int) number of items on page during pagination

        Returns: {Paginated[Address]}
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_addresses,
            account.get_address_count,
            page, page_size
        )
    @requires(WALLET_COMPONENT)
    def jsonrpc_address_unused(self, account_id=None):
        """
        Return an address containing no balance; a new address is created
        if there is none.

        Usage:
            address_unused [--account_id=<account_id>]

        Options:
            --account_id=<account_id>  : (str) id of the account to use

        Returns: {Address}
        """
        return self.get_account_or_default(account_id).receiving.get_or_create_usable_address()
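
    # Sketch of a typical receive flow with the hypothetical `daemon_call` helper:
    # grab a fresh address, hand it out, then later verify it belongs to us and
    # page through the account's addresses:
    #
    #   address = daemon_call('address_unused')
    #   assert daemon_call('address_is_mine', address=address)
    #   page1 = daemon_call('address_list', page=1, page_size=50)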
    FILE_DOC = """
    File management.
    """
    @requires(STREAM_MANAGER_COMPONENT)
    def jsonrpc_file_list(self, sort=None, reverse=False, comparison=None, **kwargs):
        """
        List files limited by optional filters

        Usage:
            file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
                      [--rowid=<rowid>] [--claim_id=<claim_id>] [--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>]
                      [--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>]
                      [--claim_name=<claim_name>] [--blobs_in_stream=<blobs_in_stream>]
                      [--blobs_remaining=<blobs_remaining>] [--sort=<sort_by>]
                      [--comparison=<comparison>] [--full_status=<full_status>] [--reverse]

        Options:
            --sd_hash=<sd_hash>                    : (str) get file with matching sd hash
            --file_name=<file_name>                : (str) get file with matching file name in the
                                                     downloads folder
            --stream_hash=<stream_hash>            : (str) get file with matching stream hash
            --rowid=<rowid>                        : (int) get file with matching row id
            --claim_id=<claim_id>                  : (str) get file with matching claim id
            --outpoint=<outpoint>                  : (str) get file with matching claim outpoint
            --txid=<txid>                          : (str) get file with matching claim txid
            --nout=<nout>                          : (int) get file with matching claim nout
            --channel_claim_id=<channel_claim_id>  : (str) get file with matching channel claim id
            --channel_name=<channel_name>          : (str) get file with matching channel name
            --claim_name=<claim_name>              : (str) get file with matching claim name
            --blobs_in_stream=<blobs_in_stream>    : (int) get file with matching blobs in stream
            --blobs_remaining=<blobs_remaining>    : (int) amount of remaining blobs to download
            --sort=<sort_by>                       : (str) field to sort by (one of the above filter fields)
            --comparison=<comparison>              : (str) logical comparison, (eq | ne | g | ge | l | le)

        Returns: {List[File]}
        """
        sort = sort or 'rowid'
        comparison = comparison or 'eq'
        return self.stream_manager.get_filtered_streams(
            sort, reverse, comparison, **kwargs
        )
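
    # Sketch: listing incomplete downloads, newest first, using the comparison
    # operators documented above (hypothetical `daemon_call` helper):
    #
    #   stuck = daemon_call('file_list', blobs_remaining=0, comparison='g',
    #                       sort='rowid', reverse=True)
    #
    # 'g' selects files whose blobs_remaining is strictly greater than 0, i.e.
    # downloads that still have blobs outstanding.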
    @requires(STREAM_MANAGER_COMPONENT)
    async def jsonrpc_file_set_status(self, status, **kwargs):
        """
        Start or stop downloading a file

        Usage:
            file_set_status (<status> | --status=<status>) [--sd_hash=<sd_hash>]
                            [--file_name=<file_name>] [--stream_hash=<stream_hash>] [--rowid=<rowid>]

        Options:
            --status=<status>            : (str) one of "start" or "stop"
            --sd_hash=<sd_hash>          : (str) set status of file with matching sd hash
            --file_name=<file_name>      : (str) set status of file with matching file name in the
                                           downloads folder
            --stream_hash=<stream_hash>  : (str) set status of file with matching stream hash
            --rowid=<rowid>              : (int) set status of file with matching row id

        Returns:
            (str) Confirmation message
        """

        if status not in ['start', 'stop']:
            raise Exception('Status must be "start" or "stop".')

        streams = self.stream_manager.get_filtered_streams(**kwargs)
        if not streams:
            raise Exception(f'Unable to find a file for {kwargs}')
        stream = streams[0]
        if status == 'start' and not stream.running:
            await self.stream_manager.start_stream(stream)
            msg = "Resumed download"
        elif status == 'stop' and stream.running:
            await self.stream_manager.stop_stream(stream)
            msg = "Stopped download"
        else:
            msg = (
                "File was already being downloaded" if status == 'start'
                else "File was already stopped"
            )
        return msg
    @requires(STREAM_MANAGER_COMPONENT)
    async def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs):
        """
        Delete a LBRY file

        Usage:
            file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                        [--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--txid=<txid>]
                        [--nout=<nout>] [--claim_name=<claim_name>] [--channel_claim_id=<channel_claim_id>]
                        [--channel_name=<channel_name>]

        Options:
            --delete_from_download_dir             : (bool) delete file from download directory,
                                                     instead of just deleting blobs
            --delete_all                           : (bool) if there are multiple matching files,
                                                     allow the deletion of multiple files.
                                                     Otherwise do not delete anything.
            --sd_hash=<sd_hash>                    : (str) delete by file sd hash
            --file_name=<file_name>                : (str) delete by file name in downloads folder
            --stream_hash=<stream_hash>            : (str) delete by file stream hash
            --rowid=<rowid>                        : (int) delete by file row id
            --claim_id=<claim_id>                  : (str) delete by file claim id
            --txid=<txid>                          : (str) delete by file claim txid
            --nout=<nout>                          : (int) delete by file claim nout
            --claim_name=<claim_name>              : (str) delete by file claim name
            --channel_claim_id=<channel_claim_id>  : (str) delete by file channel claim id
            --channel_name=<channel_name>          : (str) delete by file channel claim name

        Returns:
            (bool) true if deletion was successful
        """

        streams = self.stream_manager.get_filtered_streams(**kwargs)

        if len(streams) > 1:
            if not delete_all:
                log.warning("There are %i files to delete, use narrower filters to select one",
                            len(streams))
                return False
            else:
                log.warning("Deleting %i files",
                            len(streams))

        if not streams:
            log.warning("There is no file to delete")
            return False
        else:
            for stream in streams:
                message = f"Deleted file {stream.file_name}"
                await self.stream_manager.delete_stream(stream, delete_file=delete_from_download_dir)
                log.info(message)
            result = True
        return result
    CLAIM_DOC = """
    List and search all types of claims.
    """
    @requires(WALLET_COMPONENT)
    def jsonrpc_claim_list(self, account_id=None, page=None, page_size=None):
        """
        List my stream and channel claims.

        Usage:
            claim_list [<account_id> | --account_id=<account_id>]
                       [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id>  : (str) id of the account to query
            --page=<page>              : (int) page to return during pagination
            --page_size=<page_size>    : (int) number of items on page during pagination

        Returns: {Paginated[Output]}
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_claims,
            account.get_claim_count,
            page, page_size
        )
    @requires(WALLET_COMPONENT)
    async def jsonrpc_claim_search(
            self, name=None, claim_id=None, txid=None, nout=None,
            channel_id=None, winning=False, page=1, page_size=10):
        """
        Search for stream and channel claims on the blockchain.

        Use --channel_id=<channel_id> to list all stream claims in a channel.

        Usage:
            claim_search [<name> | --name=<name>] [--claim_id=<claim_id>] [--txid=<txid> --nout=<nout>]
                         [--channel_id=<channel_id>] [--winning] [--page=<page>] [--page_size=<page_size>]

        Options:
            --name=<name>              : (str) find claims with this name
            --claim_id=<claim_id>      : (str) find a claim with this claim_id
            --txid=<txid>              : (str) find a claim with this txid:nout
            --nout=<nout>              : (str) find a claim with this txid:nout
            --channel_id=<channel_id>  : (str) limit search to specific channel claim id (returns stream claims)
            --winning                  : (bool) limit to winning claims
            --page=<page>              : (int) page to return during pagination
            --page_size=<page_size>    : (int) number of items on page during pagination

        Returns: {Paginated[Output]}
        """
        claims = []
        if name is not None:
            claims = await self.ledger.network.get_claims_for_name(name)
        elif claim_id is not None:
            claim = await self.wallet_manager.get_claim_by_claim_id(claim_id)
            if claim and claim != 'claim not found':
                claims = {'claims': [claim]}
        elif txid is not None and nout is not None:
            claim = await self.wallet_manager.get_claim_by_outpoint(txid, int(nout))
            if claim and claim != 'claim not found':
                claims = {'claims': [claim]}
        elif channel_id is not None:
            claim = await self.wallet_manager.get_claim_by_claim_id(channel_id)
            if claim and claim != 'claim not found':
                channel_url = f"{claim['name']}#{claim['claim_id']}"
                resolve = await self.resolve(channel_url, page=page, page_size=page_size)
                resolve = resolve.get(channel_url, {})
                claims = resolve.get('claims_in_channel', []) or []
                total_pages = 0
                if claims:
                    total_pages = int((resolve['total_claims'] + (page_size - 1)) / page_size)
                # sort_claim_results(claims)
                return {"items": claims, "total_pages": total_pages, "page": page, "page_size": page_size}
        else:
            raise Exception("Must specify either name, claim_id, txid:nout or channel_id.")
        if claims:
            resolutions = await self.resolve(*(f"{claim['name']}#{claim['claim_id']}" for claim in claims['claims']))
            claims = [value.get('claim', value.get('certificate')) for value in resolutions.values()]
            sort_claim_results(claims)
        return {"items": claims, "total_pages": 1, "page": 1, "page_size": len(claims)}
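
    # Sketch: paging through all stream claims in a channel, using the pagination
    # fields returned above (hypothetical `daemon_call` helper; the claim id is a
    # placeholder):
    #
    #   page = 1
    #   while True:
    #       result = daemon_call('claim_search',
    #                            channel_id='2a7f...', page=page, page_size=50)
    #       for claim in result['items']:
    #           print(claim['name'])
    #       if page >= result['total_pages']:
    #           break
    #       page += 1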
    CHANNEL_DOC = """
    Create, update, abandon and list your channel claims.
    """
    @deprecated('channel_create')
    def jsonrpc_channel_new(self):
        """ deprecated """
    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_channel_create(
            self, name, bid, allow_duplicate_name=False, account_id=None, claim_address=None,
            preview=False, **kwargs):
        """
        Create a new channel by generating a channel private key and establishing an '@' prefixed claim.

        Usage:
            channel_create (<name> | --name=<name>) (<bid> | --bid=<bid>)
                           [--allow_duplicate_name=<allow_duplicate_name>]
                           [--title=<title>] [--description=<description>] [--email=<email>] [--featured=<featured>...]
                           [--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
                           [--website_url=<website_url>] [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
                           [--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]

        Options:
            --name=<name>                  : (str) name of the channel prefixed with '@'
            --allow_duplicate_name=<allow_duplicate_name> : (bool) create new channel even if one already exists with
                                             given name. default: false.
            --bid=<bid>                    : (decimal) amount to back the claim
            --title=<title>                : (str) title of the publication
            --description=<description>   : (str) description of the publication
            --featured=<featured>          : (list) claim_ids of featured content in channel
            --tags=<tags>                  : (list) content tags
            --languages=<languages>        : (list) languages used by the channel,
                                             using RFC 5646 format, eg:
                                             for English `--languages=en`
                                             for Spanish (Spain) `--languages=es-ES`
                                             for Spanish (Mexican) `--languages=es-MX`
                                             for Chinese (Simplified) `--languages=zh-Hans`
                                             for Chinese (Traditional) `--languages=zh-Hant`
            --locations=<locations>        : (list) locations of the channel, consisting of 2 letter
                                             `country` code and a `state`, `city` and a postal
                                             `code` along with a `latitude` and `longitude`.
                                             for JSON RPC: pass a dictionary with aforementioned
                                             attributes as keys, eg:
                                                 ...
                                                 "locations": [{'country': 'US', 'state': 'NH'}]
                                                 ...
                                             for command line: pass a colon delimited list
                                             with values in the following order:

                                                 "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                             making sure to include colon for blank values, for
                                             example to provide only the city:

                                                 ... --locations="::Manchester"

                                             with all values set:

                                                 ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                             optionally, you can just pass the "LATITUDE:LONGITUDE":

                                                 ... --locations="42.990605:-71.460989"

                                             finally, you can also pass JSON string of dictionary
                                             on the command line as you would via JSON RPC

                                                 ... --locations="{'country': 'US', 'state': 'NH'}"

            --email=<email>                : (str) email of channel owner
            --website_url=<website_url>    : (str) website url
            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --cover_url=<cover_url>        : (str) url of cover image
            --account_id=<account_id>      : (str) id of the account to store channel
            --claim_address=<claim_address>: (str) address where the channel is sent to, if not specified
                                             it will be determined automatically from the account
            --preview                      : (bool) do not broadcast the transaction

        Returns: {Transaction}
        """
        account = self.get_account_or_default(account_id)
        self.valid_channel_name_or_error(name)
        amount = self.get_dewies_or_error('bid', bid, positive_value=True)
        claim_address = await self.get_receiving_address(claim_address, account)

        existing_channels = await account.get_channels(claim_name=name)
        if len(existing_channels) > 0:
            if not allow_duplicate_name:
                raise Exception(
                    f"You already have a channel under the name '{name}'. "
                    f"Use --allow-duplicate-name flag to override."
                )

        claim = Claim()
        claim.channel.update(**kwargs)
        tx = await Transaction.claim_create(
            name, claim, amount, claim_address, [account], account
        )
        txo = tx.outputs[0]
        txo.generate_channel_private_key()

        if not preview:
            await tx.sign([account])
            await account.ledger.broadcast(tx)
            account.add_channel_private_key(txo.ref, txo.private_key)
            self.default_wallet.save()
            await self.storage.save_claims([self._old_get_temp_claim_info(
                tx, txo, claim_address, claim, name, dewies_to_lbc(amount)
            )])
            await self.analytics_manager.send_new_channel()
        else:
            await account.ledger.release_tx(tx)

        return tx
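
    # Sketch: creating a channel over JSON-RPC with a couple of metadata fields
    # (hypothetical `daemon_call` helper; values are placeholders):
    #
    #   tx = daemon_call('channel_create', name='@classics', bid='0.01',
    #                    title='Classic Literature', tags=['books'],
    #                    languages=['en'], preview=True)
    #
    # With preview=True the transaction is built and returned but never
    # broadcast, and its reserved outputs are released again.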
    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_channel_update(
            self, claim_id, bid=None, account_id=None, claim_address=None,
            new_signing_key=False, preview=False, **kwargs):
        """
        Update an existing channel claim.

        Usage:
            channel_update (<claim_id> | --claim_id=<claim_id>) [<bid> | --bid=<bid>]
                           [--title=<title>] [--description=<description>]
                           [--featured=<featured>...] [--clear_featured]
                           [--tags=<tags>...] [--clear_tags]
                           [--languages=<languages>...] [--clear_languages]
                           [--locations=<locations>...] [--clear_locations]
                           [--email=<email>]
                           [--website_url=<website_url>] [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
                           [--account_id=<account_id>] [--claim_address=<claim_address>] [--new_signing_key] [--preview]

        Options:
            --claim_id=<claim_id>          : (str) claim_id of the channel to update
            --bid=<bid>                    : (decimal) amount to back the claim
            --title=<title>                : (str) title of the publication
            --description=<description>   : (str) description of the publication
            --clear_featured               : (bool) clear existing featured content (prior to adding new ones)
            --featured=<featured>          : (list) claim_ids of featured content in channel
            --clear_tags                   : (bool) clear existing tags (prior to adding new ones)
            --tags=<tags>                  : (list) add content tags
            --clear_languages              : (bool) clear existing languages (prior to adding new ones)
            --languages=<languages>        : (list) languages used by the channel,
                                             using RFC 5646 format, eg:
                                             for English `--languages=en`
                                             for Spanish (Spain) `--languages=es-ES`
                                             for Spanish (Mexican) `--languages=es-MX`
                                             for Chinese (Simplified) `--languages=zh-Hans`
                                             for Chinese (Traditional) `--languages=zh-Hant`
            --clear_locations              : (bool) clear existing locations (prior to adding new ones)
            --locations=<locations>        : (list) locations of the channel, consisting of 2 letter
                                             `country` code and a `state`, `city` and a postal
                                             `code` along with a `latitude` and `longitude`.
                                             for JSON RPC: pass a dictionary with aforementioned
                                             attributes as keys, eg:
                                                 ...
                                                 "locations": [{'country': 'US', 'state': 'NH'}]
                                                 ...
                                             for command line: pass a colon delimited list
                                             with values in the following order:

                                                 "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                             making sure to include colon for blank values, for
                                             example to provide only the city:

                                                 ... --locations="::Manchester"

                                             with all values set:

                                                 ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                             optionally, you can just pass the "LATITUDE:LONGITUDE":

                                                 ... --locations="42.990605:-71.460989"

                                             finally, you can also pass JSON string of dictionary
                                             on the command line as you would via JSON RPC

                                                 ... --locations="{'country': 'US', 'state': 'NH'}"

            --email=<email>                : (str) email of channel owner
            --website_url=<website_url>    : (str) website url
            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --cover_url=<cover_url>        : (str) url of cover image
            --account_id=<account_id>      : (str) id of the account to store channel
            --claim_address=<claim_address>: (str) address where the channel is sent
            --new_signing_key              : (bool) generate a new signing key, will invalidate all previous publishes
            --preview                      : (bool) do not broadcast the transaction

        Returns: {Transaction}
        """
        account = self.get_account_or_default(account_id)

        existing_channels = await account.get_claims(claim_id=claim_id)
        if len(existing_channels) != 1:
            raise Exception(
                f"Can't find the channel '{claim_id}' in account '{account_id}'."
            )
        old_txo = existing_channels[0]
        if not old_txo.claim.is_channel:
            raise Exception(
                f"A claim with id '{claim_id}' was found but it is not a channel."
            )

        if bid is not None:
            amount = self.get_dewies_or_error('bid', bid, positive_value=True)
        else:
            amount = old_txo.amount

        if claim_address is not None:
            self.valid_address_or_error(claim_address)
        else:
            claim_address = old_txo.get_address(account.ledger)

        claim = Claim.from_bytes(old_txo.claim.to_bytes())
        claim.channel.update(**kwargs)
        tx = await Transaction.claim_update(
            old_txo, claim, amount, claim_address, [account], account
        )
        new_txo = tx.outputs[0]

        if new_signing_key:
            new_txo.generate_channel_private_key()
        else:
            new_txo.private_key = old_txo.private_key

        new_txo.script.generate()

        if not preview:
            await tx.sign([account])
            await account.ledger.broadcast(tx)
            account.add_channel_private_key(new_txo.ref, new_txo.private_key)
            self.default_wallet.save()
            await self.storage.save_claims([self._old_get_temp_claim_info(
                tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
            )])
            await self.analytics_manager.send_new_channel()
        else:
            await account.ledger.release_tx(tx)

        return tx
    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_channel_abandon(
            self, claim_id=None, txid=None, nout=None, account_id=None,
            preview=False, blocking=True):
        """
        Abandon one of my channel claims.

        Usage:
            channel_abandon [<claim_id> | --claim_id=<claim_id>]
                            [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                            [--account_id=<account_id>]
                            [--preview] [--blocking]

        Options:
            --claim_id=<claim_id>      : (str) claim_id of the claim to abandon
            --txid=<txid>              : (str) txid of the claim to abandon
            --nout=<nout>              : (int) nout of the claim to abandon
            --account_id=<account_id>  : (str) id of the account to use
            --preview                  : (bool) do not broadcast the transaction
            --blocking                 : (bool) wait until abandon is in mempool

        Returns: {Transaction}
        """
        account = self.get_account_or_default(account_id)

        if txid is not None and nout is not None:
            claims = await account.get_claims(**{'txo.txid': txid, 'txo.position': nout})
        elif claim_id is not None:
            claims = await account.get_claims(claim_id=claim_id)
        else:
            raise Exception('Must specify claim_id, or txid and nout')

        if not claims:
            raise Exception('No claim found for the specified claim_id or txid:nout')

        tx = await Transaction.create(
            [Input.spend(txo) for txo in claims], [], [account], account
        )

        if not preview:
            await account.ledger.broadcast(tx)
            await self.analytics_manager.send_claim_action('abandon')
            if blocking:
                await account.ledger.wait(tx)
        else:
            await account.ledger.release_tx(tx)

        return tx
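
    # Sketch: abandoning a channel by outpoint rather than claim id, without
    # waiting for mempool acceptance (hypothetical `daemon_call` helper;
    # txid is a placeholder):
    #
    #   daemon_call('channel_abandon', txid='d34db33f...', nout=0, blocking=False)
    #
    # Abandoning spends the claim output back to the account, so the backed
    # credits become spendable again once the transaction confirms.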
    @requires(WALLET_COMPONENT)
    def jsonrpc_channel_list(self, account_id=None, page=None, page_size=None):
        """
        List my channel claims.

        Usage:
            channel_list [<account_id> | --account_id=<account_id>]
                         [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id>  : (str) id of the account to use
            --page=<page>              : (int) page to return during pagination
            --page_size=<page_size>    : (int) number of items on page during pagination

        Returns: {Paginated[Output]}
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_channels,
            account.get_channel_count,
            page, page_size
        )
    @requires(WALLET_COMPONENT)
    async def jsonrpc_channel_export(self, claim_id):
        """
        Export serialized channel signing information for a given certificate claim id

        Usage:
            channel_export (<claim_id> | --claim_id=<claim_id>)

        Options:
            --claim_id=<claim_id>  : (str) Claim ID to export information about

        Returns:
            (str) Serialized certificate information
        """
        return await self.wallet_manager.export_certificate_info(claim_id)
    @requires(WALLET_COMPONENT)
    async def jsonrpc_channel_import(self, serialized_certificate_info):
        """
        Import serialized channel signing information (to allow signing new claims to the channel)

        Usage:
            channel_import (<serialized_certificate_info> | --serialized_certificate_info=<serialized_certificate_info>)

        Options:
            --serialized_certificate_info=<serialized_certificate_info> : (str) certificate info

        Returns:
            (dict) Result dictionary
        """
        return await self.wallet_manager.import_certificate_info(serialized_certificate_info)
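
    # Sketch: backing up a channel's signing key on one node and restoring it on
    # another, so both can sign publishes for the channel (hypothetical
    # `daemon_call` helper; the claim id is a placeholder):
    #
    #   exported = daemon_call('channel_export', claim_id='2a7f...')
    #   # ...move `exported` to the other machine out-of-band...
    #   daemon_call('channel_import', serialized_certificate_info=exported)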
    STREAM_DOC = """
    Create, update, abandon, list and inspect your stream claims.
    """
    @requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_publish(self, name, **kwargs):
        """
        Create or update a stream claim at a given name (use 'stream create/update' for more control).

        Usage:
            publish (<name> | --name=<name>) [--bid=<bid>] [--file_path=<file_path>]
                    [--tags=<tags>...] [--clear_tags]
                    [--languages=<languages>...] [--clear_languages]
                    [--locations=<locations>...] [--clear_locations]
                    [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
                    [--title=<title>] [--description=<description>] [--author=<author>] [--language=<language>]
                    [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
                    [--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>]
                    [--channel_id=<channel_id>] [--channel_name=<channel_name>]
                    [--channel_account_id=<channel_account_id>...]
                    [--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]

        Options:
            --name=<name>                  : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
            --bid=<bid>                    : (decimal) amount to back the claim
            --file_path=<file_path>        : (str) path to file to be associated with name.
            --fee_currency=<fee_currency>  : (string) specify fee currency
            --fee_amount=<fee_amount>      : (decimal) content download fee
            --fee_address=<fee_address>    : (str) address where to send fee payments, will use
                                             value from --claim_address if not provided
            --title=<title>                : (str) title of the publication
            --description=<description>   : (str) description of the publication
            --author=<author>              : (str) author of the publication. The usage for this field is not
                                             the same as for channels. The author field is used to credit an author
                                             who is not the publisher and is not represented by the channel. For
                                             example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
                                             be published to a channel such as '@classics', or to no channel at all
            --clear_tags                   : (bool) clear existing tags (prior to adding new ones)
            --tags=<tags>                  : (list) add content tags
            --clear_languages              : (bool) clear existing languages (prior to adding new ones)
            --languages=<languages>        : (list) languages used by the channel,
                                             using RFC 5646 format, eg:
                                             for English `--languages=en`
                                             for Spanish (Spain) `--languages=es-ES`
                                             for Spanish (Mexican) `--languages=es-MX`
                                             for Chinese (Simplified) `--languages=zh-Hans`
                                             for Chinese (Traditional) `--languages=zh-Hant`
            --clear_locations              : (bool) clear existing locations (prior to adding new ones)
            --locations=<locations>        : (list) locations relevant to the stream, consisting of 2 letter
                                             `country` code and a `state`, `city` and a postal
                                             `code` along with a `latitude` and `longitude`.
                                             for JSON RPC: pass a dictionary with aforementioned
                                             attributes as keys, eg:
                                                 ...
                                                 "locations": [{'country': 'US', 'state': 'NH'}]
                                                 ...
                                             for command line: pass a colon delimited list
                                             with values in the following order:

                                                 "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                             making sure to include colon for blank values, for
                                             example to provide only the city:

                                                 ... --locations="::Manchester"

                                             with all values set:

                                                 ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                             optionally, you can just pass the "LATITUDE:LONGITUDE":

                                                 ... --locations="42.990605:-71.460989"

                                             finally, you can also pass JSON string of dictionary
                                             on the command line as you would via JSON RPC

                                                 ... --locations="{'country': 'US', 'state': 'NH'}"

            --license=<license>            : (str) publication license
            --license_url=<license_url>    : (str) publication license url
            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --release_time=<release_time>  : (int) original public release of content, seconds since UNIX epoch
            --width=<width>                : (int) image/video width, automatically calculated from media file
            --height=<height>              : (int) image/video height, automatically calculated from media file
            --duration=<duration>          : (int) audio/video duration in seconds, automatically calculated
            --channel_id=<channel_id>      : (str) claim id of the publisher channel
            --channel_name=<channel_name>  : (str) name of publisher channel
            --channel_account_id=<channel_id>: (str) one or more account ids for accounts to look in
                                             for channel certificates, defaults to all accounts.
            --account_id=<account_id>      : (str) account to use for funding the transaction
            --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                                             it will be determined automatically from the account
            --preview                      : (bool) do not broadcast the transaction

        Returns: {Transaction}
        """
        self.valid_stream_name_or_error(name)
        account = self.get_account_or_default(kwargs.get('account_id'))
        claims = await account.get_claims(claim_name=name)
        if len(claims) == 0:
            if 'bid' not in kwargs:
                raise Exception("'bid' is a required argument for new publishes.")
            if 'file_path' not in kwargs:
                raise Exception("'file_path' is a required argument for new publishes.")
            return await self.jsonrpc_stream_create(name, **kwargs)
        elif len(claims) == 1:
            assert claims[0].claim.is_stream, f"Claim at name '{name}' is not a stream claim."
            return await self.jsonrpc_stream_update(claims[0].claim_id, **kwargs)
        raise Exception(
            f"There are {len(claims)} claims for '{name}', please use 'stream update' command "
            f"to update a specific stream claim."
        )
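
    # Sketch of the dispatch logic above from a client's point of view: the same
    # `publish` call either creates or updates depending on what already exists
    # (hypothetical `daemon_call` helper; paths and names are placeholders):
    #
    #   # first call: no claim named 'odyssey' yet, so bid and file_path are required
    #   daemon_call('publish', name='odyssey', bid='0.1',
    #               file_path='/tmp/odyssey.pdf', author='Homer')
    #   # second call: exactly one claim exists, so this becomes a stream update
    #   daemon_call('publish', name='odyssey', title='The Odyssey')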
|
|
|
|
@requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
|
|
|
|
conditions=[WALLET_IS_UNLOCKED])
|
|
|
|
async def jsonrpc_stream_create(
|
2019-03-24 21:55:04 +01:00
|
|
|
self, name, bid, file_path, allow_duplicate_name=False,
|
2019-03-27 21:02:17 +01:00
|
|
|
channel_id=None, channel_name=None, channel_account_id=None,
|
2019-03-24 21:55:04 +01:00
|
|
|
account_id=None, claim_address=None, preview=False, **kwargs):
|
|
|
|
"""
|
2019-03-25 23:30:43 +01:00
|
|
|
Make a new stream claim and announce the associated file to lbrynet.
|
2017-03-16 19:29:34 +01:00
|
|
|
|
2017-05-28 22:01:53 +02:00
|
|
|
Usage:
|
2019-04-21 05:54:34 +02:00
|
|
|
stream_create (<name> | --name=<name>) (<bid> | --bid=<bid>) (<file_path> | --file_path=<file_path>)
|
2019-03-30 01:26:10 +01:00
|
|
|
[--allow_duplicate_name=<allow_duplicate_name>]
|
|
|
|
[--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
|
2019-03-24 21:55:04 +01:00
|
|
|
[--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
|
2019-03-30 01:26:10 +01:00
|
|
|
[--title=<title>] [--description=<description>] [--author=<author>]
|
2019-03-24 21:55:04 +01:00
|
|
|
[--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
|
2019-04-21 05:54:34 +02:00
|
|
|
[--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>]
|
2019-03-27 21:02:17 +01:00
|
|
|
[--channel_id=<channel_id>] [--channel_name=<channel_name>]
|
|
|
|
[--channel_account_id=<channel_account_id>...]
|
2019-03-24 21:55:04 +01:00
|
|
|
[--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]
|
2017-05-28 22:01:53 +02:00
|
|
|
|
|
|
|
Options:
|
2018-09-05 21:38:30 +02:00
|
|
|
--name=<name> : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
|
2019-03-24 21:55:04 +01:00
|
|
|
--allow_duplicate_name=<allow_duplicate_name> : (bool) create new claim even if one already exists with
|
|
|
|
given name. default: false.
|
2018-06-09 23:38:33 +02:00
|
|
|
--bid=<bid> : (decimal) amount to back the claim
|
2019-03-24 21:55:04 +01:00
|
|
|
--file_path=<file_path> : (str) path to file to be associated with name.
|
|
|
|
--fee_currency=<fee_currency> : (string) specify fee currency
|
|
|
|
--fee_amount=<fee_amount> : (decimal) content download fee
|
|
|
|
--fee_address=<fee_address> : (str) address where to send fee payments, will use
|
|
|
|
value from --claim_address if not provided
|
2018-02-24 19:13:29 +01:00
|
|
|
--title=<title> : (str) title of the publication
|
|
|
|
--description=<description> : (str) description of the publication
|
2018-08-31 00:05:10 +02:00
|
|
|
--author=<author> : (str) author of the publication. The usage for this field is not
|
|
|
|
the same as for channels. The author field is used to credit an author
|
|
|
|
who is not the publisher and is not represented by the channel. For
|
|
|
|
example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
|
|
|
|
by published to a channel such as '@classics', or to no channel at all
|
2019-03-30 01:26:10 +01:00
|
|
|
--tags=<tags> : (list) add content tags
|
|
|
|
--languages=<languages> : (list) languages used by the channel,
|
|
|
|
using RFC 5646 format, eg:
|
|
|
|
for English `--languages=en`
|
|
|
|
for Spanish (Spain) `--languages=es-ES`
|
|
|
|
for Spanish (Mexican) `--languages=es-MX`
|
|
|
|
for Chinese (Simplified) `--languages=zh-Hans`
|
|
|
|
for Chinese (Traditional) `--languages=zh-Hant`
|
2019-03-30 01:30:08 +01:00
|
|
|
--locations=<locations> : (list) locations relevant to the stream, consisting of 2 letter
|
2019-03-30 01:26:10 +01:00
|
|
|
`country` code and a `state`, `city` and a postal
|
|
|
|
`code` along with a `latitude` and `longitude`.
|
|
|
|
for JSON RPC: pass a dictionary with aforementioned
|
|
|
|
attributes as keys, eg:
|
|
|
|
...
|
|
|
|
"locations": [{'country': 'US', 'state': 'NH'}]
|
|
|
|
...
|
|
|
|
for command line: pass a colon delimited list
|
|
|
|
with values in the following order:
|
|
|
|
|
|
|
|
"COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
|
|
|
|
|
|
|
|
making sure to include colon for blank values, for
|
|
|
|
example to provide only the city:
|
|
|
|
|
2019-03-30 01:41:18 +01:00
|
|
|
... --locations="::Manchester"
|
2019-03-30 01:26:10 +01:00
|
|
|
|
|
|
|
with all values set:
|
|
|
|
|
2019-03-30 01:41:18 +01:00
|
|
|
... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
|
2019-03-30 01:26:10 +01:00
|
|
|
|
|
|
|
optionally, you can just pass the "LATITUDE:LONGITUDE":
|
|
|
|
|
2019-03-30 01:41:18 +01:00
|
|
|
... --locations="42.990605:-71.460989"
|
2019-03-30 01:26:10 +01:00
|
|
|
|
|
|
|
finally, you can also pass JSON string of dictionary
|
|
|
|
on the command line as you would via JSON RPC
|
|
|
|
|
2019-03-30 01:41:18 +01:00
|
|
|
... --locations="{'country': 'US', 'state': 'NH'}"
|
2019-03-30 01:26:10 +01:00
|
|
|
|
2018-02-24 19:13:29 +01:00
|
|
|
--license=<license> : (str) publication license
|
|
|
|
--license_url=<license_url> : (str) publication license url
|
2019-03-24 21:55:04 +01:00
|
|
|
--thumbnail_url=<thumbnail_url>: (str) thumbnail url
|
2019-04-21 05:54:34 +02:00
|
|
|
--release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch
|
|
|
|
--width=<width> : (int) image/video width, automatically calculated from media file
|
|
|
|
--height=<height> : (int) image/video height, automatically calculated from media file
|
|
|
|
--duration=<duration> : (int) audio/video duration in seconds, automatically calculated
|
2019-03-24 21:55:04 +01:00
|
|
|
--channel_id=<channel_id> : (str) claim id of the publisher channel
|
2018-10-18 01:07:17 +02:00
|
|
|
--channel_account_id=<channel_id>: (str) one or more account ids for accounts to look in
|
2019-03-24 21:55:04 +01:00
|
|
|
for channel certificates, defaults to all accounts.
|
|
|
|
--account_id=<account_id> : (str) account to use for funding the transaction
|
|
|
|
--claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
|
|
|
|
it will be determined automatically from the account
|
|
|
|
--preview : (bool) do not broadcast the transaction
|
2019-04-06 21:33:07 +02:00
|
|
|
|
|
|
|
Returns: {Transaction}
|
2019-03-24 21:55:04 +01:00
|
|
|
"""
        self.valid_stream_name_or_error(name)
        account = self.get_account_or_default(account_id)
        channel = await self.get_channel_or_none(channel_account_id, channel_id, channel_name, for_signing=True)
        amount = self.get_dewies_or_error('bid', bid, positive_value=True)
        claim_address = await self.get_receiving_address(claim_address, account)
        kwargs['fee_address'] = self.get_fee_address(kwargs, claim_address)

        claims = await account.get_claims(claim_name=name)
        if len(claims) > 0:
            if not allow_duplicate_name:
                raise Exception(
                    f"You already have a stream claim published under the name '{name}'. "
                    f"Use --allow-duplicate-name flag to override."
                )

        claim = Claim()
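        # the sd_hash below is a placeholder of the correct length (96 hex chars)
        # so the serialized claim already has its final size; the real hash is
        # filled in after the stream is actually created further down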
        claim.stream.update(file_path=file_path, sd_hash='0'*96, **kwargs)
        tx = await Transaction.claim_create(
            name, claim, amount, claim_address, [account], account, channel
        )
        new_txo = tx.outputs[0]

        if not preview:
            file_stream = await self.stream_manager.create_stream(file_path)
            claim.stream.source.sd_hash = file_stream.sd_hash
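            # the claim bytes changed, so regenerate the output script before signing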
            new_txo.script.generate()
            if channel:
                new_txo.sign(channel)
            await tx.sign([account])
            await account.ledger.broadcast(tx)
            await self.storage.save_claims([self._old_get_temp_claim_info(
                tx, new_txo, claim_address, claim, name, dewies_to_lbc(amount)
            )])
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(claim.stream.source.sd_hash)
            if stream_hash:
                await self.storage.save_content_claim(stream_hash, new_txo.id)
            await self.analytics_manager.send_claim_action('publish')
        else:
            await account.ledger.release_tx(tx)

        return tx

    @requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_stream_update(
            self, claim_id, bid=None, file_path=None,
            channel_id=None, channel_name=None, channel_account_id=None, clear_channel=False,
            account_id=None, claim_address=None,
            preview=False, **kwargs):
        """
        Update an existing stream claim and if a new file is provided announce it to lbrynet.

        Usage:
            stream_update (<claim_id> | --claim_id=<claim_id>) [--bid=<bid>] [--file_path=<file_path>]
                          [--file_name=<file_name>] [--file_size=<file_size>] [--file_hash=<file_hash>]
                          [--tags=<tags>...] [--clear_tags]
                          [--languages=<languages>...] [--clear_languages]
                          [--locations=<locations>...] [--clear_locations]
                          [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
                          [--title=<title>] [--description=<description>] [--author=<author>] [--language=<language>]
                          [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
                          [--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>]
                          [--channel_id=<channel_id>] [--channel_name=<channel_name>] [--clear_channel]
                          [--channel_account_id=<channel_account_id>...]
                          [--account_id=<account_id>] [--claim_address=<claim_address>] [--preview]

        Options:
            --claim_id=<claim_id>          : (str) id of the stream claim to update
            --bid=<bid>                    : (decimal) amount to back the claim
            --file_path=<file_path>        : (str) path to file to be associated with name.
            --file_name=<file_name>        : (str) override file name, defaults to name from file_path.
            --file_size=<file_size>        : (str) override file size, otherwise automatically computed.
            --file_hash=<file_hash>        : (str) override file hash, otherwise automatically computed.
            --fee_currency=<fee_currency>  : (string) specify fee currency
            --fee_amount=<fee_amount>      : (decimal) content download fee
            --fee_address=<fee_address>    : (str) address where to send fee payments, will use
                                                   value from --claim_address if not provided
            --title=<title>                : (str) title of the publication
            --description=<description>    : (str) description of the publication
            --author=<author>              : (str) author of the publication. The usage for this field is not
                                                   the same as for channels. The author field is used to credit an author
                                                   who is not the publisher and is not represented by the channel. For
                                                   example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
                                                   be published to a channel such as '@classics', or to no channel at all
            --clear_tags                   : (bool) clear existing tags (prior to adding new ones)
            --tags=<tags>                  : (list) add content tags
            --clear_languages              : (bool) clear existing languages (prior to adding new ones)
            --languages=<languages>        : (list) languages used by the stream,
                                                    using RFC 5646 format, eg:
                                                      for English `--languages=en`
                                                      for Spanish (Spain) `--languages=es-ES`
                                                      for Spanish (Mexican) `--languages=es-MX`
                                                      for Chinese (Simplified) `--languages=zh-Hans`
                                                      for Chinese (Traditional) `--languages=zh-Hant`
            --clear_locations              : (bool) clear existing locations (prior to adding new ones)
            --locations=<locations>        : (list) locations relevant to the stream, consisting of 2 letter
                                                    `country` code and a `state`, `city` and a postal
                                                    `code` along with a `latitude` and `longitude`.
                                                    for JSON RPC: pass a dictionary with aforementioned
                                                        attributes as keys, eg:
                                                        ...
                                                        "locations": [{'country': 'US', 'state': 'NH'}]
                                                        ...
                                                    for command line: pass a colon delimited list
                                                        with values in the following order:

                                                          "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                                        making sure to include colon for blank values, for
                                                        example to provide only the city:

                                                          ... --locations="::Manchester"

                                                        with all values set:

                                                          ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                                        optionally, you can just pass the "LATITUDE:LONGITUDE":

                                                          ... --locations="42.990605:-71.460989"

                                                        finally, you can also pass JSON string of dictionary
                                                        on the command line as you would via JSON RPC

                                                          ... --locations="{'country': 'US', 'state': 'NH'}"

            --license=<license>            : (str) publication license
            --license_url=<license_url>    : (str) publication license url
            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --release_time=<release_time>  : (int) original public release of content, seconds since UNIX epoch
            --width=<width>                : (int) image/video width, automatically calculated from media file
            --height=<height>              : (int) image/video height, automatically calculated from media file
            --duration=<duration>          : (int) audio/video duration in seconds, automatically calculated
            --channel_id=<channel_id>      : (str) claim id of the publisher channel
            --clear_channel                : (bool) remove channel signature
            --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                                   for channel certificates, defaults to all accounts.
            --account_id=<account_id>      : (str) account to use for funding the transaction
            --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                                                   it will be determined automatically from the account
            --preview                      : (bool) do not broadcast the transaction

        Returns: {Transaction}
        """
        account = self.get_account_or_default(account_id)

        existing_claims = await account.get_claims(claim_id=claim_id)
        if len(existing_claims) != 1:
            raise Exception(
                f"Can't find the claim '{claim_id}' in account '{account_id}'."
            )
        old_txo = existing_claims[0]
        if not old_txo.claim.is_stream:
            raise Exception(
                f"A claim with id '{claim_id}' was found but it is not a stream claim."
            )

        if bid is not None:
            amount = self.get_dewies_or_error('bid', bid, positive_value=True)
        else:
            amount = old_txo.amount

        if claim_address is not None:
            self.valid_address_or_error(claim_address)
        else:
            claim_address = old_txo.get_address(account.ledger)

        channel = None
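        # resolve the signing channel: an explicitly passed channel wins, otherwise
        # keep the claim's existing signature unless --clear_channel was given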
        if channel_id or channel_name:
            channel = await self.get_channel_or_error(channel_account_id, channel_id, channel_name, for_signing=True)
        elif old_txo.claim.is_signed and not clear_channel:
            channel = old_txo.channel

        if 'fee_address' in kwargs:
            self.valid_address_or_error(kwargs['fee_address'])
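        # round-trip through bytes to get a deep copy of the old claim, so the
        # updates below never mutate the claim attached to the existing output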
        claim = Claim.from_bytes(old_txo.claim.to_bytes())
        claim.stream.update(file_path=file_path, **kwargs)
        tx = await Transaction.claim_update(
            old_txo, claim, amount, claim_address, [account], account, channel
        )
        new_txo = tx.outputs[0]

        if not preview:
            if file_path is not None:
                file_stream = await self.stream_manager.create_stream(file_path)
                new_txo.claim.stream.source.sd_hash = file_stream.sd_hash
                new_txo.script.generate()
            if channel:
                new_txo.sign(channel)
            await tx.sign([account])
            await account.ledger.broadcast(tx)
            await self.storage.save_claims([self._old_get_temp_claim_info(
                tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
            )])
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(new_txo.claim.stream.source.sd_hash)
            if stream_hash:
                await self.storage.save_content_claim(stream_hash, new_txo.id)
            await self.analytics_manager.send_claim_action('publish')
        else:
            await account.ledger.release_tx(tx)

        return tx

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_stream_abandon(
            self, claim_id=None, txid=None, nout=None, account_id=None,
            preview=False, blocking=True):
        """
        Abandon one of my stream claims.

        Usage:
            stream_abandon [<claim_id> | --claim_id=<claim_id>]
                           [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                           [--account_id=<account_id>]
                           [--preview] [--blocking]

        Options:
            --claim_id=<claim_id>     : (str) claim_id of the claim to abandon
            --txid=<txid>             : (str) txid of the claim to abandon
            --nout=<nout>             : (int) nout of the claim to abandon
            --account_id=<account_id> : (str) id of the account to use
            --preview                 : (bool) do not broadcast the transaction
            --blocking                : (bool) wait until abandon is in mempool

        Returns: {Transaction}
        """
        account = self.get_account_or_default(account_id)

        if txid is not None and nout is not None:
            claims = await account.get_claims(**{'txo.txid': txid, 'txo.position': nout})
        elif claim_id is not None:
            claims = await account.get_claims(claim_id=claim_id)
        else:
            raise Exception('Must specify claim_id, or txid and nout')

        if not claims:
            raise Exception('No claim found for the specified claim_id or txid:nout')
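        # abandoning is just spending the claim outputs without creating any new
        # outputs; the claim amount comes back to the account as change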
        tx = await Transaction.create(
            [Input.spend(txo) for txo in claims], [], [account], account
        )

        if not preview:
            await account.ledger.broadcast(tx)
            await self.analytics_manager.send_claim_action('abandon')
            if blocking:
                await account.ledger.wait(tx)
        else:
            await account.ledger.release_tx(tx)

        return tx

    @requires(WALLET_COMPONENT)
    def jsonrpc_stream_list(self, account_id=None, page=None, page_size=None):
        """
        List my stream claims.

        Usage:
            stream_list [<account_id> | --account_id=<account_id>]
                        [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id> : (str) id of the account to query
            --page=<page>             : (int) page to return during paginating
            --page_size=<page_size>   : (int) number of items on page during pagination

        Returns: {Paginated[Output]}
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_streams,
            account.get_stream_count,
            page, page_size
        )

    @requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
              DHT_COMPONENT, DATABASE_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    def jsonrpc_stream_cost_estimate(self, uri):
        """
        Get estimated cost for a lbry stream

        Usage:
            stream_cost_estimate (<uri> | --uri=<uri>)

        Options:
            --uri=<uri>      : (str) uri to use

        Returns:
            (float) Estimated cost in lbry credits, returns None if uri is not
                resolvable
        """
        return self.get_est_cost_from_uri(uri)

    SUPPORT_DOC = """
    Create, list and abandon all types of supports.
    """

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_support_create(self, claim_id, amount, tip=False, account_id=None, preview=False):
        """
        Create a support or a tip for a name claim.

        Usage:
            support_create (<claim_id> | --claim_id=<claim_id>) (<amount> | --amount=<amount>)
                           [--tip] [--account_id=<account_id>] [--preview]

        Options:
            --claim_id=<claim_id>     : (str) claim_id of the claim to support
            --amount=<amount>         : (decimal) amount of support
            --tip                     : (bool) send support to claim owner, default: false.
            --account_id=<account_id> : (str) id of the account to use
            --preview                 : (bool) do not broadcast the transaction

        Returns: {Transaction}
        """
        account = self.get_account_or_default(account_id)
        amount = self.get_dewies_or_error("amount", amount)
        claim = await account.ledger.get_claim_by_claim_id(claim_id)
        claim_name = claim['name']
        claim_address = claim['address']
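        # a tip pays the support amount to the claim owner's address; a regular
        # support pays it back to one of our own addresses so it stays spendable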
        if not tip:
            claim_address = await account.receiving.get_or_create_usable_address()

        tx = await Transaction.support(
            claim_name, claim_id, amount, claim_address, [account], account
        )

        if not preview:
            await tx.sign([account])
            await account.ledger.broadcast(tx)
            await self.storage.save_supports({claim_id: [{
                'txid': tx.id,
                'nout': tx.position,
                'address': claim_address,
                'claim_id': claim_id,
                'amount': dewies_to_lbc(amount)
            }]})
            await self.analytics_manager.send_claim_action('new_support')
        else:
            await account.ledger.release_tx(tx)

        return tx

    @requires(WALLET_COMPONENT)
    def jsonrpc_support_list(self, account_id=None, page=None, page_size=None):
        """
        List supports and tips in my control.

        Usage:
            support_list [<account_id> | --account_id=<account_id>]
                         [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id> : (str) id of the account to query
            --page=<page>             : (int) page to return during paginating
            --page_size=<page_size>   : (int) number of items on page during pagination

        Returns: {Paginated[Output]}
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_supports,
            account.get_support_count,
            page, page_size
        )

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_support_abandon(
            self, claim_id=None, txid=None, nout=None, keep=None,
            account_id=None, preview=False, blocking=True):
        """
        Abandon supports, including tips, of a specific claim, optionally
        keeping some amount as supports.

        Usage:
            support_abandon [--claim_id=<claim_id>] [(--txid=<txid> --nout=<nout>)] [--keep=<keep>]
                            [--account_id=<account_id>] [--preview] [--blocking]

        Options:
            --claim_id=<claim_id>     : (str) claim_id of the claim to abandon
            --txid=<txid>             : (str) txid of the claim to abandon
            --nout=<nout>             : (int) nout of the claim to abandon
            --keep=<keep>             : (decimal) amount of lbc to keep as support
            --account_id=<account_id> : (str) id of the account to use
            --preview                 : (bool) do not broadcast the transaction
            --blocking                : (bool) wait until abandon is in mempool

        Returns: {Transaction}
        """
        account = self.get_account_or_default(account_id)

        if txid is not None and nout is not None:
            supports = await account.get_supports(**{'txo.txid': txid, 'txo.position': nout})
        elif claim_id is not None:
            supports = await account.get_supports(claim_id=claim_id)
        else:
            raise Exception('Must specify claim_id, or txid and nout')

        if not supports:
            raise Exception('No supports found for the specified claim_id or txid:nout')

        if keep is not None:
            keep = self.get_dewies_or_error('keep', keep)
        else:
            keep = 0
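        # every selected support is spent; if an amount is kept, it is re-created
        # as a single new support output attached to the same claim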
        outputs = []
        if keep > 0:
            outputs = [
                Output.pay_support_pubkey_hash(
                    keep, supports[0].claim_name, supports[0].claim_id, supports[0].pubkey_hash
                )
            ]

        tx = await Transaction.create(
            [Input.spend(txo) for txo in supports], outputs, [account], account
        )

        if not preview:
            await account.ledger.broadcast(tx)
            await self.analytics_manager.send_claim_action('abandon')
            if blocking:
                await account.ledger.wait(tx)
        else:
            await account.ledger.release_tx(tx)

        return tx

    TRANSACTION_DOC = """
    Transaction management.
    """

    @requires(WALLET_COMPONENT)
    def jsonrpc_transaction_list(self, account_id=None, page=None, page_size=None):
        """
        List transactions belonging to wallet

        Usage:
            transaction_list [<account_id> | --account_id=<account_id>]
                             [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id> : (str) id of the account to query
            --page=<page>             : (int) page to return during paginating
            --page_size=<page_size>   : (int) number of items on page during pagination

        Returns:
            (list) List of transactions

            {
                "claim_info": (list) claim info if in txn [{
                                                        "address": (str) address of claim,
                                                        "balance_delta": (float) bid amount,
                                                        "amount": (float) claim amount,
                                                        "claim_id": (str) claim id,
                                                        "claim_name": (str) claim name,
                                                        "nout": (int) nout
                                                        }],
                "abandon_info": (list) abandon info if in txn [{
                                                        "address": (str) address of abandoned claim,
                                                        "balance_delta": (float) returned amount,
                                                        "amount": (float) claim amount,
                                                        "claim_id": (str) claim id,
                                                        "claim_name": (str) claim name,
                                                        "nout": (int) nout
                                                        }],
                "confirmations": (int) number of confirmations for the txn,
                "date": (str) date and time of txn,
                "fee": (float) txn fee,
                "support_info": (list) support info if in txn [{
                                                        "address": (str) address of support,
                                                        "balance_delta": (float) support amount,
                                                        "amount": (float) support amount,
                                                        "claim_id": (str) claim id,
                                                        "claim_name": (str) claim name,
                                                        "is_tip": (bool),
                                                        "nout": (int) nout
                                                        }],
                "timestamp": (int) timestamp,
                "txid": (str) txn id,
                "update_info": (list) update info if in txn [{
                                                        "address": (str) address of claim,
                                                        "balance_delta": (float) credited/debited,
                                                        "amount": (float) absolute amount,
                                                        "claim_id": (str) claim id,
                                                        "claim_name": (str) claim name,
                                                        "nout": (int) nout
                                                        }],
                "value": (float) value of txn
            }
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            self.wallet_manager.get_history,
            self.ledger.db.get_transaction_count,
            page, page_size, account=account
        )

    @requires(WALLET_COMPONENT)
    def jsonrpc_transaction_show(self, txid):
        """
        Get a decoded transaction from a txid

        Usage:
            transaction_show (<txid> | --txid=<txid>)

        Options:
            --txid=<txid>  : (str) txid of the transaction

        Returns: {Transaction}
        """
        return self.wallet_manager.get_transaction(txid)

    UTXO_DOC = """
    Unspent transaction management.
    """

    @requires(WALLET_COMPONENT)
    def jsonrpc_utxo_list(self, account_id=None, page=None, page_size=None):
        """
        List unspent transaction outputs

        Usage:
            utxo_list [<account_id> | --account_id=<account_id>]
                      [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id> : (str) id of the account to query
            --page=<page>             : (int) page to return during paginating
            --page_size=<page_size>   : (int) number of items on page during pagination

        Returns: {Paginated[Output]}
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_utxos,
            account.get_utxo_count,
            page, page_size
        )

    @requires(WALLET_COMPONENT)
    def jsonrpc_utxo_release(self, account_id=None):
        """
        When spending a UTXO it is locally locked to prevent double spends;
        occasionally this can result in a UTXO being locked which ultimately
        did not get spent (failed to broadcast, spend transaction was not
        accepted by blockchain node, etc). This command releases the lock
        on all UTXOs in your account.

        Usage:
            utxo_release [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id> : (str) id of the account to query

        Returns:
            None
        """
        return self.get_account_or_default(account_id).release_all_outputs()

    @requires(WALLET_COMPONENT)
    def jsonrpc_block_show(self, blockhash=None, height=None):
        """
        Get contents of a block

        Usage:
            block_show (<blockhash> | --blockhash=<blockhash>) | (<height> | --height=<height>)

        Options:
            --blockhash=<blockhash>  : (str) hash of the block to look up
            --height=<height>        : (int) height of the block to look up

        Returns:
            (dict) Requested block
        """
        return self.wallet_manager.get_block(blockhash, height)

    BLOB_DOC = """
    Blob management.
    """

    @requires(WALLET_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_blob_get(self, blob_hash, timeout=None, read=False):
        """
        Download and return a blob

        Usage:
            blob_get (<blob_hash> | --blob_hash=<blob_hash>) [--timeout=<timeout>] [--read]

        Options:
            --blob_hash=<blob_hash>  : (str) blob hash of the blob to get
            --timeout=<timeout>      : (int) timeout in number of seconds
            --read                   : (bool) return the blob contents decoded as a
                                              string instead of a status message

        Returns:
            (str) Success/Fail message or (dict) decoded data
        """

        blob = await download_blob(asyncio.get_event_loop(), self.conf, self.blob_manager, self.dht_node, blob_hash)
        if read:
            with open(blob.file_path, 'rb') as handle:
                return handle.read().decode()
        else:
            return "Downloaded blob %s" % blob_hash

    @requires(BLOB_COMPONENT, DATABASE_COMPONENT)
    async def jsonrpc_blob_delete(self, blob_hash):
        """
        Delete a blob

        Usage:
            blob_delete (<blob_hash> | --blob_hash=<blob_hash>)

        Options:
            --blob_hash=<blob_hash>  : (str) blob hash of the blob to delete

        Returns:
            (str) Success/fail message
        """
        if not blob_hash or not is_valid_blobhash(blob_hash):
            return f"Invalid blob hash to delete '{blob_hash}'"
        streams = self.stream_manager.get_filtered_streams(sd_hash=blob_hash)
        if streams:
            await self.stream_manager.delete_stream(streams[0])
        else:
            await self.blob_manager.delete_blobs([blob_hash])
        return "Deleted %s" % blob_hash

    PEER_DOC = """
    DHT / Blob Exchange peer commands.
    """

    @requires(DHT_COMPONENT)
    async def jsonrpc_peer_list(self, blob_hash, search_bottom_out_limit=None):
        """
        Get peers for blob hash

        Usage:
            peer_list (<blob_hash> | --blob_hash=<blob_hash>)
                      [<search_bottom_out_limit> | --search_bottom_out_limit=<search_bottom_out_limit>]

        Options:
            --blob_hash=<blob_hash>                              : (str) find available peers for this blob hash
            --search_bottom_out_limit=<search_bottom_out_limit>  : (int) the number of search probes in a row
                                                                         that don't find any new peers
                                                                         before giving up and returning

        Returns:
            (list) List of contact dictionaries {'address': <peer ip>, 'udp_port': <dht port>, 'tcp_port': <peer port>,
                                                 'node_id': <peer node id>}
        """

        if not is_valid_blobhash(blob_hash):
            raise Exception("invalid blob hash")
        if search_bottom_out_limit is not None:
            search_bottom_out_limit = int(search_bottom_out_limit)
            if search_bottom_out_limit <= 0:
                raise Exception("invalid bottom out limit")
        else:
            search_bottom_out_limit = 4
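        # iteratively walk the DHT for peers announcing this blob, stopping once
        # the finder bottoms out (no new peers for `search_bottom_out_limit` probes)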
        peers = []
        async for new_peers in self.dht_node.get_iterative_value_finder(unhexlify(blob_hash.encode()), max_results=1,
                                                                        bottom_out_limit=search_bottom_out_limit):
            peers.extend(new_peers)
        results = [
            {
                "node_id": hexlify(peer.node_id).decode(),
                "address": peer.address,
                "udp_port": peer.udp_port,
                "tcp_port": peer.tcp_port,
            }
            for peer in peers
        ]
        return results

    @requires(DATABASE_COMPONENT)
    async def jsonrpc_blob_announce(self, blob_hash=None, stream_hash=None, sd_hash=None):
        """
        Announce blobs to the DHT

        Usage:
            blob_announce (<blob_hash> | --blob_hash=<blob_hash>
                          | --stream_hash=<stream_hash> | --sd_hash=<sd_hash>)

        Options:
            --blob_hash=<blob_hash>        : (str) announce a blob, specified by blob_hash
            --stream_hash=<stream_hash>    : (str) announce all blobs associated with
                                                   stream_hash
            --sd_hash=<sd_hash>            : (str) announce all blobs associated with
                                                   sd_hash and the sd_hash itself

        Returns:
            (bool) true if successful
        """
        blob_hashes = []
        if blob_hash:
            blob_hashes.append(blob_hash)
        elif stream_hash or sd_hash:
            if sd_hash and stream_hash:
                raise Exception("either the sd hash or the stream hash should be provided, not both")
            if sd_hash:
                stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
            blobs = await self.storage.get_blobs_for_stream(stream_hash, only_completed=True)
            blob_hashes.extend(blob.blob_hash for blob in blobs if blob.blob_hash is not None)
        else:
            raise Exception('single argument must be specified')
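        # flag the collected hashes for an immediate one-off re-announcement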
        await self.storage.should_single_announce_blobs(blob_hashes, immediate=True)
        return True

    @requires(BLOB_COMPONENT, WALLET_COMPONENT)
    async def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None,
                                finished=None, page_size=None, page=None):
        """
        Returns blob hashes. If not given filters, returns all blobs known by the blob manager

        Usage:
            blob_list [--needed] [--finished] [<uri> | --uri=<uri>]
                      [<stream_hash> | --stream_hash=<stream_hash>]
                      [<sd_hash> | --sd_hash=<sd_hash>]
                      [<page_size> | --page_size=<page_size>]
                      [<page> | --page=<page>]

        Options:
            --needed                     : (bool) only return needed blobs
            --finished                   : (bool) only return finished blobs
            --uri=<uri>                  : (str) filter blobs by stream in a uri
            --stream_hash=<stream_hash>  : (str) filter blobs by stream hash
            --sd_hash=<sd_hash>          : (str) filter blobs by sd hash
            --page_size=<page_size>      : (int) results page size
            --page=<page>                : (int) page of results to return

        Returns:
            (list) List of blob hashes
        """

        if uri or stream_hash or sd_hash:
            if uri:
                metadata = (await self.resolve(uri))[uri]
                sd_hash = utils.get_sd_hash(metadata)
                stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
            elif stream_hash:
                sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
            elif sd_hash:
                stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
                sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
            if sd_hash:
                blobs = [sd_hash]
            else:
                blobs = []
            if stream_hash:
                blobs.extend([b.blob_hash for b in (await self.storage.get_blobs_for_stream(stream_hash))[:-1]])
        else:
            blobs = list(self.blob_manager.completed_blob_hashes)
        if needed:
            blobs = [blob_hash for blob_hash in blobs if not self.blob_manager.get_blob(blob_hash).get_is_verified()]
        if finished:
            blobs = [blob_hash for blob_hash in blobs if self.blob_manager.get_blob(blob_hash).get_is_verified()]
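        # paginate manually; page numbering is zero-based here and the default is
        # a single page containing every matching blob hash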
        page_size = page_size or len(blobs)
        page = page or 0
        start_index = page * page_size
        stop_index = start_index + page_size
        return blobs[start_index:stop_index]

    @requires(BLOB_COMPONENT)
    async def jsonrpc_blob_reflect(self, blob_hashes, reflector_server=None):
        """
        Reflects specified blobs

        Usage:
            blob_reflect (<blob_hashes>...) [--reflector_server=<reflector_server>]

        Options:
            --reflector_server=<reflector_server>          : (str) reflector address

        Returns:
            (list) reflected blob hashes
        """

        raise NotImplementedError()

    @requires(BLOB_COMPONENT)
    async def jsonrpc_blob_reflect_all(self):
        """
        Reflects all saved blobs

        Usage:
            blob_reflect_all

        Options:
            None

        Returns:
            (bool) true if successful
        """

        raise NotImplementedError()

    @requires(STREAM_MANAGER_COMPONENT)
    async def jsonrpc_file_reflect(self, **kwargs):
        """
        Reflect all the blobs in a file matching the filter criteria

        Usage:
            file_reflect [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                         [--stream_hash=<stream_hash>] [--rowid=<rowid>]
                         [--reflector=<reflector>]

        Options:
            --sd_hash=<sd_hash>          : (str) get file with matching sd hash
            --file_name=<file_name>      : (str) get file with matching file name in the
                                           downloads folder
            --stream_hash=<stream_hash>  : (str) get file with matching stream hash
            --rowid=<rowid>              : (int) get file with matching row id
            --reflector=<reflector>      : (str) reflector server, ip address or url
                                           by default choose a server from the config

        Returns:
            (list) list of blobs reflected
        """
        server, port = kwargs.get('server'), kwargs.get('port')
        if server and port:
            port = int(port)
        else:
            server, port = random.choice(self.conf.reflector_servers)
        reflected = await asyncio.gather(*[
            stream.upload_to_reflector(server, port)
            for stream in self.stream_manager.get_filtered_streams(**kwargs)
        ])
        total = []
        for reflected_for_stream in reflected:
            total.extend(reflected_for_stream)
        return total

    @requires(DHT_COMPONENT)
    async def jsonrpc_peer_ping(self, node_id, address, port):
        """
        Send a kademlia ping to the specified peer. If address and port are provided the peer is directly pinged,
        if not provided the peer is located first.

        Usage:
            peer_ping (<node_id> | --node_id=<node_id>) (<address> | --address=<address>) (<port> | --port=<port>)

        Options:
            None

        Returns:
            (str) pong, or {'error': <error message>} if an error is encountered
        """
        peer = None
        if node_id and address and port:
            peer = self.component_manager.peer_manager.get_kademlia_peer(unhexlify(node_id), address,
                                                                         udp_port=int(port))
            try:
                return await self.dht_node.protocol.get_rpc_peer(peer).ping()
            except asyncio.TimeoutError:
                return {'error': 'timeout'}
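        # locating a peer by node_id alone is not implemented here, so without an
        # address and port the lookup falls through to an error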
        if not peer:
            return {'error': 'peer not found'}

    @requires(DHT_COMPONENT)
    def jsonrpc_routing_table_get(self):
        """
        Get DHT routing information

        Usage:
            routing_table_get

        Options:
            None

        Returns:
            (dict) dictionary containing routing and peer information
            {
                "buckets": {
                    <bucket index>: [
                        {
                            "address": (str) peer address,
                            "udp_port": (int) peer udp port,
                            "tcp_port": (int) peer tcp port,
                            "node_id": (str) peer node id,
                        }
                    ]
                },
                "node_id": (str) the local dht node id
            }
        """
        result = {
            'buckets': {}
        }
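        # serialize every peer in each k-bucket of the routing table into a plain dict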
        for i in range(len(self.dht_node.protocol.routing_table.buckets)):
            result['buckets'][i] = []
            for peer in self.dht_node.protocol.routing_table.buckets[i].peers:
                host = {
                    "address": peer.address,
                    "udp_port": peer.udp_port,
                    "tcp_port": peer.tcp_port,
                    "node_id": hexlify(peer.node_id).decode(),
                }
                result['buckets'][i].append(host)

        result['node_id'] = hexlify(self.dht_node.protocol.node_id).decode()
        return result

    COMMENT_DOC = """
    Create and list comments.
    """

    @requires(WALLET_COMPONENT)
    async def jsonrpc_comment_list(self, claim_id, parent_comment_id=None, flat=False,
                                   page=1, page_size=None, max_replies_shown=5):
        """
        List comments associated with a claim.

        Usage:
            comment_list <claim_id> [--flat] [(--page=<page> --page_size=<page_size>)]
                         [--parent_comment_id=<parent_comment_id>]
                         [--max_replies_shown=<max_replies_shown>]

        Options:
            --flat  : (bool) Flag to indicate whether or not you want the
                             replies to be flattened along with the rest of
                             the comments attached to the claim. Off by default
            --parent_comment_id=<parent_comment_id>  : (int) The ID of an existing
                                                             comment to list replies from
            --max_replies_shown=<max_replies_shown>  : (int) For every comment that we pull replies from,
                                                             only retrieve up to this amount.
                                                             Note: This is not the same as page size.
            --page=<page>  : (int) The page you'd like to see in the comment list.
                                   The first page is 1, second page is 2, and so on.
            --page_size=<page_size>  : (int) The amount of comments that you'd like to
                                             retrieve in one request

        Returns:
            (dict) Dict containing the following schema:
            {
                "page": (int) The page of comments as provided when limiting response to page_size.
                "page_size": (int) Number of comments in the given page. -1 if page_size wasn't used
                "comments": (list) Contains all the comments (as dicts) as provided by the specified parameters
            }
        """
        # Should be like this:
        # comment list [claim_id] [parent_comment_id] --flat --page=1 --page-size=10
        url = self.conf.comment_server
        # The server uses permanent URIs for keys; not claims.
        # This is temporary until we can get that functionality removed
        claim_info = (await self.jsonrpc_claim_search(claim_id=claim_id))
        if 'error' in claim_info:
            raise Exception(claim_info['error'])
        if claim_info["page"] == 0:
            return {'page': 1, 'page_size': 0, 'comments': []}
        claim_uri = claim_info["items"][0]['permanent_url']
        # These two cases need separation since getting replies requires a bit of magic
        # to reduce request count from O(n^2) to O(1)
        if parent_comment_id:
            # Since we don't directly get all the comment data at once,
            # we have to do a bit more work to get them
            comment_ids = await jsonrpc_post(url, 'get_comment_replies',
                                             comm_index=parent_comment_id, clean=False)
            comment_ids = comment_ids['result']
            if page_size is not None:
                comment_ids = comment_ids[page_size * (page - 1): page_size * page]
            # now we have to just batch request the reply comments
            comments_batch = [
                rpc_body('get_comment_data', index, comm_index=comment_id, better_keys=True)
                for index, comment_id in enumerate(comment_ids)
            ]
            del comment_ids
            comments = await jsonrpc_batch(url, comments_batch, clean=True)
        else:
            # Get the content of the top level comments
            comments = await jsonrpc_post(url, 'get_claim_comments', uri=claim_uri, better_keys=True)
            if page_size is not None:
                comments = comments[page_size * (page - 1): page_size * page]
        # By now comments should be a list containing comment dicts that are supposed to be
        # at the given height that was requested. The parent_id may or may not be present
        # in the dicts, as they may or may not be replies to comments at a higher level.
        # However this is dependent purely on whether or not parent_comment_id is None or not
        reply_lists = await jsonrpc_batch(url, [
            rpc_body('get_comment_replies', index, comm_index=comment['comment_id'])
            for index, comment in enumerate(comments)
        ])
        response = {
            'page': page,
            'page_size': -1 if page_size is None else page_size,
            'comments': []
        }
        if flat:
            # If it's flat then we'll need to get the comments into an order such that
            # if an element e in the list has a non-null parent id, the element before it
            # is either also a reply with the same parent id, or has an id that equals e's parent id,
            # in which case it's the comment that is being replied to.
            # Otherwise, if it has a null parent id, then it is a top level comment.

            # To do this, we create a dict that maps the index of the comment in the array
            # to a list containing the comment IDs of the replies
            comment_replies = {resp['id']: resp['result'] for resp in reply_lists if 'result' in resp}

            # Next, we create a batch request for the actual data of all of the replies
            # the id in this batch request is going to be in the form 'X:Y'
            # where X is the index of the parent comment in `comments`,
            # and Y is the index of the reply's ID within the list X maps to in `comment_replies`
            full_replies_batch = [
                rpc_body('get_comment_data', f'{parent_idx}:{idx}', comm_index=reply_id, better_keys=True)
                for parent_idx, id_list in comment_replies.items()
                for idx, reply_id in enumerate(id_list[0:max_replies_shown])
            ]
            reply_dump = await jsonrpc_batch(url, full_replies_batch)
            del full_replies_batch
            # This neatly orders the response into a dict to aggregate the
            # full comments by the parent comment they're replying to
            #
            # WARNING: The following block is going to be saving the comment dict
            #   objects TO `comment_replies`. This means that the lists
            #   stored in `comment_replies` may not hold just comments, but
            #   the ids of the comments who weren't requested due to the
            #   maximum reply limit. They need to be either cleaned out or stored
            #   somewhere else
            for comment in reply_dump:
                parent_index, reply_index = comment['id'].split(':')
                parent_index, reply_index = int(parent_index), int(reply_index)
                comment_replies[parent_index][reply_index] = comment['result']

            for idx, parent_comment in enumerate(comments):
                if 'parent_id' not in parent_comment:
                    parent_comment['parent_id'] = None
                parent_comment['reply_count'] = len(comment_replies[idx])
                parent_comment['omitted'] = 0
                if len(comment_replies[idx]) > max_replies_shown:
                    parent_comment['omitted'] = len(comment_replies[idx]) - max_replies_shown

                response['comments'].append(parent_comment)
                response['comments'] += comment_replies[idx][0:max_replies_shown]
            response['page_size'] = page_size if page_size is not None else -1
            return response
        else:
            for id_list in reply_lists:
                comments[id_list['id']]['reply_count'] = len(id_list['result'])
                comments[id_list['id']]['omitted'] = len(id_list['result'])
            response['comments'] = comments
        del reply_lists
        return response

    @requires(WALLET_COMPONENT)
    async def jsonrpc_comment_create(self, claim_id: str, channel_id: str,
                                     message: str, parent_comment_id: int = None) -> dict:
        """
        Create and associate a comment with a claim using your channel identity.

        Usage:
            comment_create <claim_id> <channel_id> <message> [--parent_comment_id=<parent_comment_id>]

        Options:
            --parent_comment_id=<parent_comment_id>  : (int) The ID of a comment to make a response to

        Returns:
            (dict) Comment object if successfully made
        """
        if not 1 < len(message) <= 2000:
            raise Exception(f'Message length ({len(message)}) needs to be between 2 and 2000 chars')
        url = self.conf.comment_server
        if parent_comment_id is not None:
            comment_id = await jsonrpc_post(url, 'reply', parent_id=parent_comment_id,
                                            poster=channel_id, message=message)
        else:
            claim_data = await self.jsonrpc_claim_search(claim_id=claim_id)
            if 'error' not in claim_data and claim_data['total_pages'] == 1:
                uri = claim_data['items'][0]['permanent_url']
                comment_id = await jsonrpc_post(url, 'comment', uri=uri,
                                                poster=channel_id, message=message)
            else:
                raise Exception(f"permanent_url is not in the claim_data {claim_data}\n"
                                f"The given claim_id ({claim_id}) may be invalid")
        return await jsonrpc_post(url, 'get_comment_data', comm_index=comment_id, better_keys=True)

    def valid_address_or_error(self, address):
        try:
            assert self.ledger.is_valid_address(address)
        except Exception:
            raise Exception(f"'{address}' is not a valid address")

    @staticmethod
    def valid_stream_name_or_error(name: str):
        try:
            if not name:
                raise Exception(
                    "Stream name cannot be blank."
                )
            parsed = parse_lbry_uri(name)
            if parsed.is_channel:
                raise Exception(
                    "Stream names cannot start with '@' symbol. This is reserved for channel claims."
                )
            if parsed.name != name:
                raise Exception(
                    "Stream name has invalid characters."
                )
        except (TypeError, URIParseError):
            raise Exception("Invalid stream name.")

    @staticmethod
    def valid_channel_name_or_error(name: str):
        try:
            if not name:
                raise Exception(
                    "Channel name cannot be blank."
                )
            parsed = parse_lbry_uri(name)
            if not parsed.is_channel:
                raise Exception("Channel names must start with '@' symbol.")
            if parsed.name != name:
                raise Exception("Channel name has invalid characters.")
        except (TypeError, URIParseError):
            raise Exception("Invalid channel name.")

    def get_fee_address(self, kwargs: dict, claim_address: str) -> str:
        if 'fee_address' in kwargs:
            self.valid_address_or_error(kwargs['fee_address'])
            return kwargs['fee_address']
        if 'fee_currency' in kwargs or 'fee_amount' in kwargs:
            return claim_address
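        # falls through to an implicit None when no fee-related arguments were given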

    async def get_receiving_address(self, address: str, account: LBCAccount) -> str:
        if address is None:
            return await account.receiving.get_or_create_usable_address()
        self.valid_address_or_error(address)
        return address

    async def get_channel_or_none(self, account_ids: List[str], channel_id: str = None, channel_name: str = None,
                                  for_signing: bool = False) -> Output:
        if channel_id is not None:
            return await self.get_channel_or_error(account_ids, channel_id, channel_name, for_signing)
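        # implicitly returns None when no channel_id was provided, even if a
        # channel_name was passed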
|
2019-03-24 21:55:04 +01:00
|
|
|
|
2019-03-27 21:02:17 +01:00
|
|
|

    async def get_channel_or_error(self, account_ids: List[str], channel_id: str = None, channel_name: str = None,
                                   for_signing: bool = False) -> Output:
        if channel_id:
            key, value = 'id', channel_id
        elif channel_name:
            key, value = 'name', channel_name
        else:
            raise ValueError("Couldn't find channel because a channel_id or channel_name was not provided.")
        for account in self.get_accounts_or_all(account_ids):
            channels = await account.get_channels(**{f'claim_{key}': value}, limit=1)
            if len(channels) == 1:
                if for_signing and channels[0].private_key is None:
                    raise Exception(f"Couldn't find private key for {key} '{value}'.")
                return channels[0]
            elif len(channels) > 1:
                raise ValueError(
                    f"Multiple channels found with channel_{key} '{value}', "
                    f"pass a channel_id to narrow it down."
                )
        raise ValueError(f"Couldn't find channel with channel_{key} '{value}'.")
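
    # A minimal usage sketch (hypothetical channel id), e.g. when a claim must be signed:
    #     channel = await daemon.get_channel_or_error([], channel_id='2fb2a...', for_signing=True)
    # An empty account_ids list searches every account in the default wallet.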

    def get_account_or_default(self, account_id: str, argument_name: str = "account", lbc_only=True) -> LBCAccount:
        if account_id is None:
            return self.default_account
        return self.get_account_or_error(account_id, argument_name, lbc_only)

    def get_accounts_or_all(self, account_ids: List[str]) -> List[LBCAccount]:
        # an empty (or None) account_ids list selects every account in the default wallet
        return [
            self.get_account_or_error(account_id)
            for account_id in account_ids
        ] if account_ids else self.default_wallet.accounts

    def get_account_or_error(
            self, account_id: str, argument_name: str = "account", lbc_only=True) -> LBCAccount:
        for account in self.default_wallet.accounts:
            if account.id == account_id:
                if lbc_only and not isinstance(account, LBCAccount):
                    raise ValueError(
                        f"Found '{account_id}', but it's a {account.ledger.symbol} ledger account. "
                        f"'{argument_name}' requires specifying an LBC ledger account."
                    )
                return account
        raise ValueError(f"Couldn't find account: {account_id}.")

    @staticmethod
    def get_dewies_or_error(argument: str, lbc: str, positive_value=False):
        try:
            dewies = lbc_to_dewies(lbc)
        except ValueError as e:
            raise ValueError(f"Invalid value for '{argument}': {e.args[0]}")
        # checked outside the try block so this error isn't re-caught and double-wrapped
        if positive_value and dewies <= 0:
            raise ValueError(f"'{argument}' value must be greater than 0.0")
        return dewies
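
    # Dewies are the smallest LBC denomination (analogous to satoshis): 1 LBC == 10**8 dewies.
    # Usage sketch (hypothetical argument names):
    #     Daemon.get_dewies_or_error('amount', '1.0')                    # -> 100000000
    #     Daemon.get_dewies_or_error('bid', '0.0', positive_value=True)  # raises ValueError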

    async def resolve(self, *uris, **kwargs):
        page = kwargs.get('page', 0)
        page_size = kwargs.get('page_size', 10)
        ledger: MainNetLedger = self.default_account.ledger
        results = await ledger.resolve(page, page_size, *uris)
        if 'error' not in results:
            await self.storage.save_claims_for_resolve([
                value for value in results.values() if 'error' not in value
            ])
        return results
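
    # e.g. (hypothetical URI): `await daemon.resolve('lbry://@lbry/video', page=0, page_size=10)`
    # returns a dict keyed by URI; successfully resolved claims are also saved to storage.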

    async def get_claims_for_name(self, name: str):
        response = await self.ledger.network.get_claims_for_name(name)
        resolutions = await self.resolve(*(f"{claim['name']}#{claim['claim_id']}" for claim in response['claims']))
        response['claims'] = [value.get('claim', value.get('certificate')) for value in resolutions.values()]
        return response

    def _old_get_temp_claim_info(self, tx, txo, address, claim_dict, name, bid):
        return {
            "claim_id": txo.claim_id,
            "name": name,
            "amount": bid,
            "address": address,
            "txid": tx.id,
            "nout": txo.position,
            "value": claim_dict,
            "height": -1,
            "claim_sequence": -1,
        }


def loggly_time_string(dt):
    formatted_dt = dt.strftime("%Y-%m-%dT%H:%M:%S")
    milliseconds = dt.microsecond // 1000  # whole milliseconds, 0-999
    return quote(f"{formatted_dt}.{milliseconds:03d}Z")
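
# e.g. loggly_time_string(datetime(2019, 1, 1, 12, 0, 0, 123456)) -> '2019-01-01T12%3A00%3A00.123Z'
# (quote() percent-encodes the colons so the value is safe in a query string)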


def get_loggly_query_string(installation_id):
    base_loggly_search_url = "https://lbry.loggly.com/search#"
    now = utils.now()
    yesterday = now - utils.timedelta(days=1)
    params = {
        'terms': 'json.installation_id:{}*'.format(installation_id[:SHORT_ID_LEN]),
        'from': loggly_time_string(yesterday),
        'to': loggly_time_string(now)
    }
    data = urlencode(params)
    return base_loggly_search_url + data
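
# The resulting URL opens a Loggly search scoped to this installation id (shortened to
# SHORT_ID_LEN characters) over the past 24 hours.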