import os
import requests
import urllib
import textwrap

from typing import Callable, Optional, List
from operator import itemgetter
from binascii import hexlify, unhexlify
from copy import deepcopy
from twisted.internet.task import LoopingCall
from traceback import format_exc

from torba.client.baseaccount import SingleKey, HierarchicalDeterministic

from lbrynet import __version__
from lbrynet.dht.error import TimeoutError
from lbrynet.blob.blob_file import is_valid_blobhash
from lbrynet.extras import system_info
from lbrynet.extras.reflector import reupload
from lbrynet.extras.daemon.Components import d2f
from lbrynet.extras.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT
from lbrynet.extras.daemon.Components import FILE_MANAGER_COMPONENT, RATE_LIMITER_COMPONENT
from lbrynet.extras.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT, PAYMENT_RATE_COMPONENT, UPNP_COMPONENT
from lbrynet.extras.daemon.ComponentManager import RequiredCondition
from lbrynet.extras.daemon.Downloader import GetStream
from lbrynet.extras.daemon.Publisher import Publisher
from lbrynet.extras.daemon.mime_types import guess_media_type
from lbrynet.extras.wallet import LbryWalletManager
from lbrynet.extras.wallet.account import Account as LBCAccount
from lbrynet.extras.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet.p2p.StreamDescriptor import download_sd_blob
from lbrynet.p2p.Error import InsufficientFundsError, UnknownNameError, DownloadDataTimeout, DownloadSDTimeout
from lbrynet.p2p.Error import NullFundsError, NegativeFundsError, ResolveError
from lbrynet.p2p.Peer import Peer
from lbrynet.p2p.SinglePeerDownloader import SinglePeerDownloader
from lbrynet.p2p.client.StandaloneBlobDownloader import StandaloneBlobDownloader
from lbrynet.schema.claim import ClaimDict
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.schema.error import URIParseError, DecodeError
from lbrynet.schema.validator import validate_claim_id
from lbrynet.schema.address import decode_address
from lbrynet.schema.decode import smart_decode
from lbrynet.extras.daemon import analytics
from lbrynet.extras.daemon.ComponentManager import ComponentManager
from lbrynet.extras.looping_call_manager import LoopingCallManager
from lbrynet.p2p.Error import ComponentsNotStarted, ComponentStartConditionNotMet
from lbrynet.extras.daemon.json_response_encoder import JSONResponseEncoder

import asyncio
import logging
import json
import inspect
import signal
from functools import wraps
from twisted.internet import defer

from lbrynet import utils
from lbrynet.extras.daemon.undecorated import undecorated
from lbrynet import conf

from aiohttp import web

log = logging.getLogger(__name__)


def requires(*components, **conditions):
    if conditions and ["conditions"] != list(conditions.keys()):
        raise SyntaxError("invalid conditions argument")
    condition_names = conditions.get("conditions", [])

    def _wrap(fn):
        @wraps(fn)
        def _inner(*args, **kwargs):
            component_manager = args[0].component_manager
            for condition_name in condition_names:
                condition_result, err_msg = component_manager.evaluate_condition(condition_name)
                if not condition_result:
                    raise ComponentStartConditionNotMet(err_msg)
            if not component_manager.all_components_running(*components):
                raise ComponentsNotStarted("the following required components have not yet started: "
                                           "%s" % json.dumps(components))
            return fn(*args, **kwargs)
        return _inner
    return _wrap
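
# Illustrative usage (not from the original source): a method declared as
#
#     @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
#     def jsonrpc_example(self):
#         ...
#
# raises ComponentsNotStarted if the wallet component has not finished
# starting, and ComponentStartConditionNotMet if the wallet is locked.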


def deprecated(new_command=None):
    def _deprecated_wrapper(f):
        f.new_command = new_command
        f._deprecated = True
        return f
    return _deprecated_wrapper


INITIALIZING_CODE = 'initializing'

# TODO: make this consistent with the stages in Downloader.py
DOWNLOAD_METADATA_CODE = 'downloading_metadata'
DOWNLOAD_TIMEOUT_CODE = 'timeout'
DOWNLOAD_RUNNING_CODE = 'running'
DOWNLOAD_STOPPED_CODE = 'stopped'
STREAM_STAGES = [
    (INITIALIZING_CODE, 'Initializing'),
    (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
    (DOWNLOAD_RUNNING_CODE, 'Started %s, got %s/%s blobs, stream status: %s'),
    (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
    (DOWNLOAD_TIMEOUT_CODE, 'Stream timed out')
]

CONNECTION_STATUS_CONNECTED = 'connected'
CONNECTION_STATUS_NETWORK = 'network_connection'
CONNECTION_MESSAGES = {
    CONNECTION_STATUS_CONNECTED: 'No connection problems detected',
    CONNECTION_STATUS_NETWORK: "Your internet connection appears to have been interrupted",
}

SHORT_ID_LEN = 20
MAX_UPDATE_FEE_ESTIMATE = 0.3

DIRECTION_ASCENDING = 'asc'
DIRECTION_DESCENDING = 'desc'
DIRECTIONS = DIRECTION_ASCENDING, DIRECTION_DESCENDING

EMPTY_PARAMS = [{}]
LBRY_SECRET = "LBRY_SECRET"


async def maybe_paginate(get_records: Callable, get_record_count: Callable,
                         page: Optional[int], page_size: Optional[int], **constraints):
    if None not in (page, page_size):
        constraints.update({
            "offset": page_size * (page-1),
            "limit": page_size
        })
        return {
            "items": await get_records(**constraints),
            "total_pages": int(((await get_record_count(**constraints)) + (page_size-1)) / page_size),
            "page": page, "page_size": page_size
        }
    return await get_records(**constraints)
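
# Illustrative arithmetic: with 45 records and page_size=10, total_pages is
# int((45 + 9) / 10) == 5 -- the "+ (page_size-1)" term implements a ceiling
# division without importing math.ceil.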


class IterableContainer:
    def __iter__(self):
        for attr in dir(self):
            if not attr.startswith("_"):
                yield getattr(self, attr)

    def __contains__(self, item):
        for attr in self:
            if item == attr:
                return True
        return False


class Checker:
    """The looping calls the daemon runs"""
    INTERNET_CONNECTION = 'internet_connection_checker', 300
    # CONNECTION_STATUS = 'connection_status_checker'


class _FileID(IterableContainer):
    """The different ways a file can be identified"""
    SD_HASH = 'sd_hash'
    FILE_NAME = 'file_name'
    STREAM_HASH = 'stream_hash'
    ROWID = "rowid"
    CLAIM_ID = "claim_id"
    OUTPOINT = "outpoint"
    TXID = "txid"
    NOUT = "nout"
    CHANNEL_CLAIM_ID = "channel_claim_id"
    CLAIM_NAME = "claim_name"
    CHANNEL_NAME = "channel_name"


FileID = _FileID()


# TODO add login credentials in a conf file
# TODO alert if your copy of a lbry file is out of date with the name record


class NoValidSearch(Exception):
    pass


class CheckInternetConnection:
    def __init__(self, daemon):
        self.daemon = daemon

    def __call__(self):
        self.daemon.connected_to_internet = utils.check_connection()


class AlwaysSend:
    def __init__(self, value_generator, *args, **kwargs):
        self.value_generator = value_generator
        self.args = args
        self.kwargs = kwargs

    def __call__(self):
        d = defer.maybeDeferred(self.value_generator, *self.args, **self.kwargs)
        d.addCallback(lambda v: (True, v))
        return d


def sort_claim_results(claims):
    claims.sort(key=lambda d: (d['height'], d['name'], d['claim_id'], d['txid'], d['nout']))
    return claims
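
# The five-key sort gives a deterministic total order over claims: ties on
# height fall back to name, then claim_id, then txid, then nout, so repeated
# calls over the same claim set always produce the same ordering.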


def is_first_run():
    if os.path.isfile(conf.settings.get_db_revision_filename()):
        return False
    if os.path.isfile(os.path.join(conf.settings.data_dir, 'lbrynet.sqlite')):
        return False
    if os.path.isfile(os.path.join(conf.settings.wallet_dir, 'blockchain_headers')):
        return False
    return True


DHT_HAS_CONTACTS = "dht_has_contacts"
WALLET_IS_UNLOCKED = "wallet_is_unlocked"


class DHTHasContacts(RequiredCondition):
    name = DHT_HAS_CONTACTS
    component = DHT_COMPONENT
    message = "your node is not connected to the dht"

    @staticmethod
    def evaluate(component):
        return len(component.contacts) > 0


class WalletIsUnlocked(RequiredCondition):
    name = WALLET_IS_UNLOCKED
    component = WALLET_COMPONENT
    message = "your wallet is locked"

    @staticmethod
    def evaluate(component):
        return not component.check_locked()


class JSONRPCError:
    # http://www.jsonrpc.org/specification#error_object
    CODE_PARSE_ERROR = -32700  # Invalid JSON. Error while parsing the JSON text.
    CODE_INVALID_REQUEST = -32600  # The JSON sent is not a valid Request object.
    CODE_METHOD_NOT_FOUND = -32601  # The method does not exist / is not available.
    CODE_INVALID_PARAMS = -32602  # Invalid method parameter(s).
    CODE_INTERNAL_ERROR = -32603  # Internal JSON-RPC error (comparable to an HTTP 500)
    CODE_APPLICATION_ERROR = -32500  # Generic application-level error
    CODE_AUTHENTICATION_ERROR = -32501  # Authentication failed

    MESSAGES = {
        CODE_PARSE_ERROR: "Parse Error. Data is not valid JSON.",
        CODE_INVALID_REQUEST: "JSON data is not a valid Request",
        CODE_METHOD_NOT_FOUND: "Method Not Found",
        CODE_INVALID_PARAMS: "Invalid Params",
        CODE_INTERNAL_ERROR: "Internal Error",
        CODE_AUTHENTICATION_ERROR: "Authentication Failed",
    }

    HTTP_CODES = {
        CODE_INVALID_REQUEST: 400,
        CODE_PARSE_ERROR: 400,
        CODE_INVALID_PARAMS: 400,
        CODE_METHOD_NOT_FOUND: 404,
        CODE_INTERNAL_ERROR: 500,
        CODE_APPLICATION_ERROR: 500,
        CODE_AUTHENTICATION_ERROR: 401,
    }

    def __init__(self, message, code=CODE_APPLICATION_ERROR, traceback=None, data=None):
        assert isinstance(code, int), "'code' must be an int"
        assert (data is None or isinstance(data, dict)), "'data' must be None or a dict"
        self.code = code
        if message is None:
            message = self.MESSAGES[code] if code in self.MESSAGES else "API Error"
        self.message = message
        self.data = {} if data is None else data
        self.traceback = []
        if traceback is not None:
            trace_lines = traceback.split("\n")
            for i, t in enumerate(trace_lines):
                if "--- <exception caught here> ---" in t:
                    if len(trace_lines) > i + 1:
                        self.traceback = [j for j in trace_lines[i+1:] if j]
                    break

    def to_dict(self):
        return {
            'code': self.code,
            'message': self.message,
            'data': self.traceback
        }

    @classmethod
    def create_from_exception(cls, message, code=CODE_APPLICATION_ERROR, traceback=None):
        return cls(message, code=code, traceback=traceback)


class UnknownAPIMethodError(Exception):
    pass


def jsonrpc_dumps_pretty(obj, **kwargs):
    if isinstance(obj, JSONRPCError):
        data = {"jsonrpc": "2.0", "error": obj.to_dict()}
    else:
        data = {"jsonrpc": "2.0", "result": obj}
    return json.dumps(data, cls=JSONResponseEncoder, sort_keys=True, indent=2, **kwargs) + "\n"
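
# Illustrative output shape (exact rendering depends on JSONResponseEncoder):
# a plain result serializes as {"jsonrpc": "2.0", "result": ...} while a
# JSONRPCError instance serializes as {"jsonrpc": "2.0", "error": {...}}
# built from JSONRPCError.to_dict().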


def trap(err, *to_trap):
    err.trap(*to_trap)


class JSONRPCServerType(type):
    def __new__(mcs, name, bases, newattrs):
        klass = type.__new__(mcs, name, bases, newattrs)
        klass.callable_methods = {}
        klass.deprecated_methods = {}

        for methodname in dir(klass):
            if methodname.startswith("jsonrpc_"):
                method = getattr(klass, methodname)
                if not hasattr(method, '_deprecated'):
                    klass.callable_methods.update({methodname.split("jsonrpc_")[1]: method})
                else:
                    klass.deprecated_methods.update({methodname.split("jsonrpc_")[1]: method})
        return klass
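
# Example of the registration this metaclass performs (illustrative): a
# method named jsonrpc_resolve on a class using this metaclass is stored in
# callable_methods under the key "resolve"; decorating it with @deprecated()
# moves it into deprecated_methods instead.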


class Daemon(metaclass=JSONRPCServerType):
    """
    LBRYnet daemon, a jsonrpc interface to lbry functions
    """

    component_attributes = {
        DATABASE_COMPONENT: "storage",
        DHT_COMPONENT: "dht_node",
        WALLET_COMPONENT: "wallet_manager",
        FILE_MANAGER_COMPONENT: "file_manager",
        EXCHANGE_RATE_MANAGER_COMPONENT: "exchange_rate_manager",
        PAYMENT_RATE_COMPONENT: "payment_rate_manager",
        RATE_LIMITER_COMPONENT: "rate_limiter",
        BLOB_COMPONENT: "blob_manager",
        UPNP_COMPONENT: "upnp"
    }

    allowed_during_startup = []

    def __init__(self, analytics_manager=None, component_manager=None):
        to_skip = conf.settings['components_to_skip']
        if 'reflector' not in to_skip and not conf.settings['run_reflector_server']:
            to_skip.append('reflector')
        looping_calls = {
            Checker.INTERNET_CONNECTION[0]: (LoopingCall(CheckInternetConnection(self)),
                                             Checker.INTERNET_CONNECTION[1])
        }
        use_authentication = conf.settings['use_auth_http']
        use_https = conf.settings['use_https']
        self.analytics_manager = analytics_manager or analytics.Manager.new_instance()
        self.component_manager = component_manager or ComponentManager(
            analytics_manager=self.analytics_manager,
            skip_components=to_skip or [],
        )
        self.looping_call_manager = LoopingCallManager({n: lc for n, (lc, t) in (looping_calls or {}).items()})
        self._looping_call_times = {n: t for n, (lc, t) in (looping_calls or {}).items()}
        self._use_authentication = use_authentication or conf.settings['use_auth_http']
        self._use_https = use_https or conf.settings['use_https']
        self.listening_port = None
        self._component_setup_task = None
        self.announced_startup = False
        self.sessions = {}
        self.is_first_run = is_first_run()

        # TODO: move this to a component
        self.connected_to_internet = True
        self.connection_status_code = None

        # components
        # TODO: delete these, get the components where needed
        self.storage = None
        self.dht_node = None
        self.wallet_manager: LbryWalletManager = None
        self.file_manager = None
        self.exchange_rate_manager = None
        self.payment_rate_manager = None
        self.rate_limiter = None
        self.blob_manager = None
        self.upnp = None

        # TODO: delete this
        self.streams = {}

        logging.getLogger('aiohttp.access').setLevel(logging.WARN)
        self.app = web.Application()
        self.app.router.add_get('/lbryapi', self.handle_old_jsonrpc)
        self.app.router.add_post('/lbryapi', self.handle_old_jsonrpc)
        self.app.router.add_post('/', self.handle_old_jsonrpc)
        self.handler = self.app.make_handler()
        self.server = None

    async def start_listening(self):
        try:
            self.server = await asyncio.get_event_loop().create_server(
                self.handler, conf.settings['api_host'], conf.settings['api_port']
            )
            log.info('lbrynet API listening on TCP %s:%i', *self.server.sockets[0].getsockname()[:2])
            await self.setup()
            await self.analytics_manager.send_server_startup_success()
        except OSError:
            log.error('lbrynet API failed to bind TCP %s:%i for listening. Daemon is already running or this port is '
                      'already in use by another application.', conf.settings['api_host'], conf.settings['api_port'])
        except defer.CancelledError:
            log.info("shutting down before finished starting")
        except Exception as err:
            await self.analytics_manager.send_server_startup_error(str(err))
            log.exception('Failed to start lbrynet-daemon')

    async def setup(self):
        log.info("Starting lbrynet-daemon")
        log.info("Platform: %s", json.dumps(system_info.get_platform()))

        if not self.analytics_manager.is_started:
            self.analytics_manager.start()
        await self.analytics_manager.send_server_startup()
        for lc_name, lc_time in self._looping_call_times.items():
            self.looping_call_manager.start(lc_name, lc_time)

        def update_attribute(component):
            setattr(self, self.component_attributes[component.component_name], component.component)

        kwargs = {component: update_attribute for component in self.component_attributes.keys()}
        self._component_setup_task = self.component_manager.setup(**kwargs)
        await self._component_setup_task

        log.info("Started lbrynet-daemon")

    @staticmethod
    def _already_shutting_down(sig_num, frame):
        log.info("Already shutting down")

    async def shutdown(self):
        self._stop_streams()
        # ignore INT/TERM signals once shutdown has started
        signal.signal(signal.SIGINT, self._already_shutting_down)
        signal.signal(signal.SIGTERM, self._already_shutting_down)
        if self.listening_port:
            self.listening_port.stopListening()
        if self.server is not None:
            self.server.close()
            await self.server.wait_closed()
            await self.app.shutdown()
            await self.handler.shutdown(60.0)
            await self.app.cleanup()
        if self.analytics_manager:
            self.analytics_manager.shutdown()
        try:
            self._component_setup_task.cancel()
        except (AttributeError, asyncio.CancelledError):
            pass
        if self.component_manager is not None:
            await self.component_manager.stop()

    async def handle_old_jsonrpc(self, request):
        data = await request.json()
        result = await self._process_rpc_call(data)
        return web.Response(
            text=jsonrpc_dumps_pretty(result, ledger=self.ledger),
            content_type='application/json'
        )

    async def _process_rpc_call(self, data):
        args = data.get('params', {})

        try:
            function_name = data['method']
        except KeyError:
            return JSONRPCError(
                "Missing 'method' value in request.", JSONRPCError.CODE_METHOD_NOT_FOUND
            )

        try:
            fn = self._get_jsonrpc_method(function_name)
        except UnknownAPIMethodError:
            return JSONRPCError(
                f"Invalid method requested: {function_name}.", JSONRPCError.CODE_METHOD_NOT_FOUND
            )

        if args in (EMPTY_PARAMS, []):
            _args, _kwargs = (), {}
        elif isinstance(args, dict):
            _args, _kwargs = (), args
        elif len(args) == 1 and isinstance(args[0], dict):
            # TODO: this is for backwards compatibility. Remove this once API and UI are updated
            # TODO: also delete EMPTY_PARAMS then
            _args, _kwargs = (), args[0]
        elif len(args) == 2 and isinstance(args[0], list) and isinstance(args[1], dict):
            _args, _kwargs = args
        else:
            return JSONRPCError(
                "Invalid parameters format.", JSONRPCError.CODE_INVALID_PARAMS
            )
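
        # Accepted 'params' shapes (illustrative):
        #   {"a": 1}            -> keyword arguments only
        #   [{"a": 1}]          -> legacy single-dict form, treated as kwargs
        #   [[1, 2], {"b": 3}]  -> positional arguments plus kwargs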

        params_error, erroneous_params = self._check_params(fn, _args, _kwargs)
        if params_error is not None:
            params_error_message = '{} for {} command: {}'.format(
                params_error, function_name, ', '.join(erroneous_params)
            )
            log.warning(params_error_message)
            return JSONRPCError(
                params_error_message, JSONRPCError.CODE_INVALID_PARAMS
            )

        try:
            result = fn(self, *_args, **_kwargs)
            if asyncio.iscoroutine(result):
                result = await result
            return result
        except Exception as e:  # pylint: disable=broad-except
            return JSONRPCError(
                str(e), JSONRPCError.CODE_APPLICATION_ERROR, format_exc()
            )
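
    # Example request body handled above (illustrative):
    #   {"jsonrpc": "2.0", "method": "status", "params": {}}
    # dispatches to jsonrpc_status() and returns its result to the caller.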

    def _verify_method_is_callable(self, function_path):
        if function_path not in self.callable_methods:
            raise UnknownAPIMethodError(function_path)

    def _get_jsonrpc_method(self, function_path):
        if function_path in self.deprecated_methods:
            new_command = self.deprecated_methods[function_path].new_command
            log.warning('API function \"%s\" is deprecated, please update to use \"%s\"',
                        function_path, new_command)
            function_path = new_command
        self._verify_method_is_callable(function_path)
        return self.callable_methods.get(function_path)

    @staticmethod
    def _check_params(function, args_tup, args_dict):
        argspec = inspect.getfullargspec(undecorated(function))
        num_optional_params = 0 if argspec.defaults is None else len(argspec.defaults)

        duplicate_params = [
            duplicate_param
            for duplicate_param in argspec.args[1:len(args_tup) + 1]
            if duplicate_param in args_dict
        ]

        if duplicate_params:
            return 'Duplicate parameters', duplicate_params

        # note: the end of the slice is computed explicitly because a slice
        # ending at -0 would be empty when there are no optional parameters
        missing_required_params = [
            required_param
            for required_param in argspec.args[len(args_tup)+1:len(argspec.args)-num_optional_params]
            if required_param not in args_dict
        ]
        if missing_required_params:
            return 'Missing required parameters', missing_required_params

        extraneous_params = [] if argspec.varkw is not None else [
            extra_param
            for extra_param in args_dict
            if extra_param not in argspec.args[1:]
        ]
        if extraneous_params:
            return 'Extraneous parameters', extraneous_params

        return None, None
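
    # Worked example (illustrative): for a method
    #     def jsonrpc_example(self, a, b, c=1, d=2): ...
    # argspec.args == ['self', 'a', 'b', 'c', 'd'] and argspec.defaults == (1, 2).
    # With args_tup == ('x',), the consumed positional slot is 'a'
    # (argspec.args[1:2]); the required-but-unfilled names are
    # argspec.args[2:3] == ['b']; 'c' and 'd' are optional because
    # len(argspec.defaults) == 2.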

    @property
    def default_wallet(self):
        try:
            return self.wallet_manager.default_wallet
        except AttributeError:
            return None

    @property
    def default_account(self):
        try:
            return self.wallet_manager.default_account
        except AttributeError:
            return None

    @property
    def ledger(self):
        try:
            return self.wallet_manager.default_account.ledger
        except AttributeError:
            return None

    def _stop_streams(self):
        """stop pending GetStream downloads"""
        for sd_hash, stream in self.streams.items():
            stream.cancel(reason="daemon shutdown")

    async def _download_blob(self, blob_hash, rate_manager=None, timeout=None):
        """
        Download a blob

        :param blob_hash (str): blob hash
        :param rate_manager (PaymentRateManager), optional: the payment rate manager to use,
                                                         defaults to session.payment_rate_manager
        :param timeout (int): blob timeout
        :return: BlobFile
        """
        if not blob_hash:
            raise Exception("Nothing to download")

        rate_manager = rate_manager or self.payment_rate_manager
        timeout = timeout or 30
        downloader = StandaloneBlobDownloader(
            blob_hash, self.blob_manager, self.component_manager.peer_finder, self.rate_limiter,
            rate_manager, self.wallet_manager, timeout
        )
        return await d2f(downloader.download())

    async def _get_stream_analytics_report(self, claim_dict):
        sd_hash = claim_dict.source_hash.decode()
        try:
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
        except Exception:
            stream_hash = None
        report = {
            "sd_hash": sd_hash,
            "stream_hash": stream_hash,
        }
        blobs = {}
        try:
            sd_host = await d2f(self.blob_manager.get_host_downloaded_from(sd_hash))
        except Exception:
            sd_host = None
        report["sd_blob"] = sd_host
        if stream_hash:
            blob_infos = await self.storage.get_blobs_for_stream(stream_hash)
            report["known_blobs"] = len(blob_infos)
        else:
            blob_infos = []
            report["known_blobs"] = 0
        # for blob_hash, blob_num, iv, length in blob_infos:
        #     try:
        #         host = yield self.session.blob_manager.get_host_downloaded_from(blob_hash)
        #     except Exception:
        #         host = None
        #     if host:
        #         blobs[blob_num] = host
        # report["blobs"] = json.dumps(blobs)
        return report

    async def _download_name(self, name, claim_dict, sd_hash, txid, nout, timeout=None, file_name=None):
        """
        Add a lbry file to the file manager, start the download, and return the new lbry file.
        If it already exists in the file manager, return the existing lbry file
        """

        async def _download_finished(download_id, name, claim_dict):
            report = await self._get_stream_analytics_report(claim_dict)
            await self.analytics_manager.send_download_finished(download_id, name, report, claim_dict)
            await self.analytics_manager.send_new_download_success(download_id, name, claim_dict)

        async def _download_failed(error, download_id, name, claim_dict):
            report = await self._get_stream_analytics_report(claim_dict)
            await self.analytics_manager.send_download_errored(error, download_id, name, claim_dict,
                                                               report)
            await self.analytics_manager.send_new_download_fail(download_id, name, claim_dict, error)

        if sd_hash in self.streams:
            downloader = self.streams[sd_hash]
            return await d2f(downloader.finished_deferred)
        else:
            download_id = utils.random_string()
            await self.analytics_manager.send_download_started(download_id, name, claim_dict)
            await self.analytics_manager.send_new_download_start(download_id, name, claim_dict)
            self.streams[sd_hash] = GetStream(
                self.file_manager.sd_identifier, self.wallet_manager, self.exchange_rate_manager, self.blob_manager,
                self.component_manager.peer_finder, self.rate_limiter, self.payment_rate_manager, self.storage,
                conf.settings['max_key_fee'], conf.settings['disable_max_key_fee'], conf.settings['data_rate'],
                timeout
            )
            try:
                lbry_file, finished_deferred = await d2f(self.streams[sd_hash].start(
                    claim_dict, name, txid, nout, file_name
                ))
                finished_deferred.addCallbacks(
                    lambda _: asyncio.create_task(_download_finished(download_id, name, claim_dict)),
                    lambda e: asyncio.create_task(_download_failed(e, download_id, name, claim_dict))
                )
                result = await self._get_lbry_file_dict(lbry_file)
            except Exception as err:
                await _download_failed(err, download_id, name, claim_dict)
                if isinstance(err, (DownloadDataTimeout, DownloadSDTimeout)):
                    log.warning('Failed to get %s (%s)', name, err)
                else:
                    log.error('Failed to get %s (%s)', name, err)
                if self.streams[sd_hash].downloader and self.streams[sd_hash].code != 'running':
                    await d2f(self.streams[sd_hash].downloader.stop(err))
                result = {'error': str(err)}
            finally:
                del self.streams[sd_hash]
            return result

    async def _publish_stream(self, account, name, bid, claim_dict, file_path=None, certificate=None,
                              claim_address=None, change_address=None):
        publisher = Publisher(
            account, self.blob_manager, self.payment_rate_manager, self.storage,
            self.file_manager, self.wallet_manager, certificate
        )
        parse_lbry_uri(name)
        if not file_path:
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(
                claim_dict['stream']['source']['source'])
            tx = await publisher.publish_stream(name, bid, claim_dict, stream_hash, claim_address)
        else:
            tx = await publisher.create_and_publish_stream(name, bid, claim_dict, file_path, claim_address)
            if conf.settings['reflect_uploads']:
                d = reupload.reflect_file(publisher.lbry_file)
                d.addCallbacks(lambda _: log.info("Reflected new publication to lbry://%s", name),
                               log.exception)
        await self.analytics_manager.send_claim_action('publish')
        nout = 0
        txo = tx.outputs[nout]
        log.info("Success! Published to lbry://%s txid: %s nout: %d", name, tx.id, nout)
        return {
            "success": True,
            "tx": tx,
            "claim_id": txo.claim_id,
            "claim_address": self.ledger.hash160_to_address(txo.script.values['pubkey_hash']),
            "output": tx.outputs[nout]
        }

    async def _get_or_download_sd_blob(self, blob, sd_hash):
        if blob:
            return self.blob_manager.get_blob(blob[0])
        return await d2f(download_sd_blob(
            sd_hash.decode(), self.blob_manager, self.component_manager.peer_finder, self.rate_limiter,
            self.payment_rate_manager, self.wallet_manager, timeout=conf.settings['peer_search_timeout'],
            download_mirrors=conf.settings['download_mirrors']
        ))

    def get_or_download_sd_blob(self, sd_hash):
        """Return previously downloaded sd blob if already in the blob
        manager, otherwise download and return it
        """
        return self._get_or_download_sd_blob(
            self.blob_manager.completed_blobs([sd_hash.decode()]), sd_hash
        )

    def get_size_from_sd_blob(self, sd_blob):
        """
        Get total stream size in bytes from a sd blob
        """

        d = self.file_manager.sd_identifier.get_metadata_for_sd_blob(sd_blob)
        d.addCallback(lambda metadata: metadata.validator.info_to_show())
        d.addCallback(lambda info: int(dict(info)['stream_size']))
        return d

    def _get_est_cost_from_stream_size(self, size):
        """
        Calculate estimated LBC cost for a stream given its size in bytes
        """

        if self.payment_rate_manager.generous:
            return 0.0
        return size / (10 ** 6) * conf.settings['data_rate']
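
    # Worked example (illustrative): a 5,000,000 byte stream with
    # data_rate = 0.0001 LBC/MB costs 5.0 * 0.0001 = 0.0005 LBC; any key fee
    # from the claim metadata is added separately by
    # _add_key_fee_to_est_data_cost.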

    async def get_est_cost_using_known_size(self, uri, size):
        """
        Calculate estimated LBC cost for a stream given its size in bytes
        """
        cost = self._get_est_cost_from_stream_size(size)
        resolved = await self.wallet_manager.resolve(uri)

        if uri in resolved and 'claim' in resolved[uri]:
            claim = ClaimDict.load_dict(resolved[uri]['claim']['value'])
            final_fee = self._add_key_fee_to_est_data_cost(claim.source_fee, cost)
            return final_fee

    async def get_est_cost_from_sd_hash(self, sd_hash):
        """
        Get estimated cost from a sd hash
        """
        sd_blob = await self.get_or_download_sd_blob(sd_hash)
        stream_size = await d2f(self.get_size_from_sd_blob(sd_blob))
        return self._get_est_cost_from_stream_size(stream_size)

    async def _get_est_cost_from_metadata(self, metadata, name):
        try:
            return self._add_key_fee_to_est_data_cost(
                metadata.source_fee, await self.get_est_cost_from_sd_hash(metadata.source_hash)
            )
        except Exception:
            log.warning("Timeout getting blob for cost est for lbry://%s, using only key fee", name)
            return 0.0

    def _add_key_fee_to_est_data_cost(self, fee, data_cost):
        fee_amount = 0.0 if not fee else self.exchange_rate_manager.convert_currency(fee.currency,
                                                                                     "LBC",
                                                                                     fee.amount)
        return data_cost + fee_amount

    async def get_est_cost_from_uri(self, uri):
        """
        Resolve a name and return the estimated stream cost
        """

        resolved = await self.wallet_manager.resolve(uri)
        if resolved:
            claim_response = resolved[uri]
        else:
            claim_response = None

        if claim_response and 'claim' in claim_response:
            if 'value' in claim_response['claim'] and claim_response['claim']['value'] is not None:
                claim_value = ClaimDict.load_dict(claim_response['claim']['value'])
                cost = await self._get_est_cost_from_metadata(claim_value, uri)
                return round(cost, 5)
            else:
                log.warning("Failed to estimate cost for %s", uri)

    def get_est_cost(self, uri, size=None):
        """Get a cost estimate for a lbry stream, if size is not provided the
        sd blob will be downloaded to determine the stream size

        """
        if size is not None:
            return self.get_est_cost_using_known_size(uri, size)
        return self.get_est_cost_from_uri(uri)

    async def _get_lbry_file_dict(self, lbry_file):
        key = hexlify(lbry_file.key) if lbry_file.key else None
        download_directory = lbry_file.download_directory
        if not os.path.exists(download_directory):
            download_directory = conf.settings.download_dir
        full_path = os.path.join(download_directory, lbry_file.file_name)
        mime_type = guess_media_type(lbry_file.file_name)
        if os.path.isfile(full_path):
            with open(full_path) as written_file:
                written_file.seek(0, os.SEEK_END)
                written_bytes = written_file.tell()
        else:
            written_bytes = 0

        size = await lbry_file.get_total_bytes()
        file_status = await lbry_file.status()
        num_completed = file_status.num_completed
        num_known = file_status.num_known
        status = file_status.running_status

        return {
            'completed': lbry_file.completed,
            'file_name': lbry_file.file_name,
            'download_directory': download_directory,
            'points_paid': lbry_file.points_paid,
            'stopped': lbry_file.stopped,
            'stream_hash': lbry_file.stream_hash,
            'stream_name': lbry_file.stream_name,
            'suggested_file_name': lbry_file.suggested_file_name,
            'sd_hash': lbry_file.sd_hash,
            'download_path': full_path,
            'mime_type': mime_type,
            'key': key,
            'total_bytes': size,
            'written_bytes': written_bytes,
            'blobs_completed': num_completed,
            'blobs_in_stream': num_known,
            'status': status,
            'claim_id': lbry_file.claim_id,
            'txid': lbry_file.txid,
            'nout': lbry_file.nout,
            'outpoint': lbry_file.outpoint,
            'metadata': lbry_file.metadata,
            'channel_claim_id': lbry_file.channel_claim_id,
            'channel_name': lbry_file.channel_name,
            'claim_name': lbry_file.claim_name
        }

    async def _get_lbry_file(self, search_by, val, return_json=False):
        lbry_file = None
        if search_by in FileID:
            for l_f in self.file_manager.lbry_files:
                if l_f.__dict__.get(search_by) == val:
                    lbry_file = l_f
                    break
        else:
            raise NoValidSearch(f'{search_by} is not a valid search operation')
        if return_json and lbry_file:
            lbry_file = await self._get_lbry_file_dict(lbry_file)
        return lbry_file

    async def _get_lbry_files(self, return_json=False, **kwargs):
        lbry_files = list(self.file_manager.lbry_files)
        if kwargs:
            for search_type, value in iter_lbry_file_search_values(kwargs):
                lbry_files = [l_f for l_f in lbry_files if l_f.__dict__[search_type] == value]
        if return_json:
            file_dicts = []
            for lbry_file in lbry_files:
                lbry_file_dict = await self._get_lbry_file_dict(lbry_file)
                file_dicts.append(lbry_file_dict)
            lbry_files = file_dicts
        log.debug("Collected %i lbry files", len(lbry_files))
        return lbry_files

    def _sort_lbry_files(self, lbry_files, sort_by):
        for field, direction in sort_by:
            is_reverse = direction == DIRECTION_DESCENDING
            key_getter = create_key_getter(field) if field else None
            lbry_files = sorted(lbry_files, key=key_getter, reverse=is_reverse)
        return lbry_files

    def _parse_lbry_files_sort(self, sort):
        """
        Given a sort string like 'file_name, desc' or 'points_paid',
        parse the string into a tuple of (field, direction).
        Direction defaults to ascending.
        """

        pieces = [p.strip() for p in sort.split(',')]
        field = pieces.pop(0)
        direction = DIRECTION_ASCENDING
        if pieces and pieces[0] in DIRECTIONS:
            direction = pieces[0]
        return field, direction
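
    # Examples (illustrative): 'file_name, desc' -> ('file_name', 'desc'),
    # while 'points_paid' alone -> ('points_paid', 'asc').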

    def _get_single_peer_downloader(self):
        downloader = SinglePeerDownloader()
        downloader.setup(self.wallet_manager)
        return downloader

    async def _blob_availability(self, blob_hash, search_timeout, blob_timeout, downloader=None):
        if not downloader:
            downloader = self._get_single_peer_downloader()
        search_timeout = search_timeout or conf.settings['peer_search_timeout']
        blob_timeout = blob_timeout or conf.settings['sd_download_timeout']
        reachable_peers = []
        unreachable_peers = []
        try:
            peers = await self.jsonrpc_peer_list(blob_hash, search_timeout)
            peer_infos = [{"peer": Peer(x['host'], x['port']),
                           "blob_hash": blob_hash,
                           "timeout": blob_timeout} for x in peers]
            dl = []
            dl_peers = []
            dl_results = []
            for peer_info in peer_infos:
                dl.append(downloader.download_temp_blob_from_peer(**peer_info))
                dl_peers.append("%s:%i" % (peer_info['peer'].host, peer_info['peer'].port))
            for dl_peer, download_result in zip(dl_peers, await asyncio.gather(*dl)):
                if download_result:
                    reachable_peers.append(dl_peer)
                else:
                    unreachable_peers.append(dl_peer)
                dl_results.append(download_result)
            is_available = any(dl_results)
        except Exception as err:
            return {'error': "Failed to get peers for blob: %s" % err}

        return {
            'is_available': is_available,
            'reachable_peers': reachable_peers,
            'unreachable_peers': unreachable_peers,
        }

    ############################################################################
    #                                                                          #
    #                    JSON-RPC API methods start here                       #
    #                                                                          #
    ############################################################################

    @deprecated("stop")
    def jsonrpc_daemon_stop(self):
        pass

    def jsonrpc_stop(self):
        """
        Stop lbrynet

        Usage:
            stop

        Options:
            None

        Returns:
            (string) Shutdown message
        """
        log.info("Shutting down lbrynet daemon")
        return "Shutting down"

    async def jsonrpc_status(self):
        """
        Get daemon status

        Usage:
            status

        Options:
            None

        Returns:
            (dict) lbrynet-daemon status
            {
                'installation_id': (str) installation id - base58,
                'is_running': (bool),
                'is_first_run': bool,
                'skipped_components': (list) [names of skipped components (str)],
                'startup_status': { Does not include components which have been skipped
                    'database': (bool),
                    'wallet': (bool),
                    'session': (bool),
                    'dht': (bool),
                    'hash_announcer': (bool),
                    'stream_identifier': (bool),
                    'file_manager': (bool),
                    'blob_manager': (bool),
                    'blockchain_headers': (bool),
                    'peer_protocol_server': (bool),
                    'reflector': (bool),
                    'upnp': (bool),
                    'exchange_rate_manager': (bool),
                },
                'connection_status': {
                    'code': (str) connection status code,
                    'message': (str) connection status message
                },
                'blockchain_headers': {
                    'downloading_headers': (bool),
                    'download_progress': (float) 0-100.0
                },
                'wallet': {
                    'blocks': (int) local blockchain height,
                    'blocks_behind': (int) remote_height - local_height,
                    'best_blockhash': (str) block hash of most recent block,
                    'is_encrypted': (bool),
                    'is_locked': (bool),
                },
                'dht': {
                    'node_id': (str) lbry dht node id - hex encoded,
                    'peers_in_routing_table': (int) the number of peers in the routing table,
                },
                'blob_manager': {
                    'finished_blobs': (int) number of finished blobs in the blob manager,
                },
                'hash_announcer': {
                    'announce_queue_size': (int) number of blobs currently queued to be announced
                },
                'file_manager': {
                    'managed_files': (int) count of files in the file manager,
                },
                'upnp': {
                    'aioupnp_version': (str),
                    'redirects': {
                        <TCP | UDP>: (int) external_port,
                    },
                    'gateway': (str) manufacturer and model,
                    'dht_redirect_set': (bool),
                    'peer_redirect_set': (bool),
                    'external_ip': (str) external ip address,
                }
            }
        """

        connection_code = CONNECTION_STATUS_CONNECTED if self.connected_to_internet else CONNECTION_STATUS_NETWORK
        response = {
            'installation_id': conf.settings.installation_id,
            'is_running': all(self.component_manager.get_components_status().values()),
            'is_first_run': self.is_first_run,
            'skipped_components': self.component_manager.skip_components,
            'startup_status': self.component_manager.get_components_status(),
            'connection_status': {
                'code': connection_code,
                'message': CONNECTION_MESSAGES[connection_code],
            },
        }
        for component in self.component_manager.components:
            status = await component.get_status()
            if status:
                response[component.component_name] = status
        return response

    def jsonrpc_version(self):
        """
        Get lbry version information

        Usage:
            version

        Options:
            None

        Returns:
            (dict) Dictionary of lbry version information
            {
                'build': (str) build type (e.g. "dev", "rc", "release"),
                'ip': (str) remote ip, if available,
                'lbrynet_version': (str) lbrynet_version,
                'lbryum_version': (str) lbryum_version,
                'lbryschema_version': (str) lbryschema_version,
                'os_release': (str) os release string
                'os_system': (str) os name
                'platform': (str) platform string
                'processor': (str) processor type,
                'python_version': (str) python version,
            }
        """
        platform_info = system_info.get_platform()
        log.info("Get version info: %s", json.dumps(platform_info))
        return platform_info

    def jsonrpc_report_bug(self, message=None):
        """
        Report a bug to slack

        Usage:
            report_bug (<message> | --message=<message>)

        Options:
            --message=<message> : (str) Description of the bug

        Returns:
            (bool) true if successful
        """

        platform_name = system_info.get_platform()['platform']
        report_bug_to_slack(
            message,
            conf.settings.installation_id,
            platform_name,
            __version__
        )
        return True

    def jsonrpc_settings_get(self):
        """
        Get daemon settings

        Usage:
            settings_get

        Options:
            None

        Returns:
            (dict) Dictionary of daemon settings
            See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings
        """
        return conf.settings.get_adjustable_settings_dict()

    def jsonrpc_settings_set(self, **kwargs):
        """
        Set daemon settings

        Usage:
            settings_set [--download_directory=<download_directory>]
                         [--data_rate=<data_rate>]
                         [--download_timeout=<download_timeout>]
                         [--peer_port=<peer_port>]
                         [--max_key_fee=<max_key_fee>]
                         [--disable_max_key_fee=<disable_max_key_fee>]
                         [--use_upnp=<use_upnp>]
                         [--run_reflector_server=<run_reflector_server>]
                         [--cache_time=<cache_time>]
                         [--reflect_uploads=<reflect_uploads>]
                         [--share_usage_data=<share_usage_data>]
                         [--peer_search_timeout=<peer_search_timeout>]
                         [--sd_download_timeout=<sd_download_timeout>]
                         [--auto_renew_claim_height_delta=<auto_renew_claim_height_delta>]

        Options:
            --download_directory=<download_directory>     : (str) path of download directory
            --data_rate=<data_rate>                       : (float) 0.0001
            --download_timeout=<download_timeout>         : (int) 180
            --peer_port=<peer_port>                       : (int) 3333
            --max_key_fee=<max_key_fee>                   : (dict) maximum key fee for downloads,
                                                            in the format:
                                                            {
                                                                'currency': <currency_symbol>,
                                                                'amount': <amount>
                                                            }.
                                                            In the CLI, it must be an escaped JSON string
                                                            Supported currency symbols: LBC, USD, BTC
            --disable_max_key_fee=<disable_max_key_fee>   : (bool) False
            --use_upnp=<use_upnp>                         : (bool) True
            --run_reflector_server=<run_reflector_server> : (bool) False
            --cache_time=<cache_time>                     : (int) 150
            --reflect_uploads=<reflect_uploads>           : (bool) True
            --share_usage_data=<share_usage_data>         : (bool) True
            --peer_search_timeout=<peer_search_timeout>   : (int) 3
            --sd_download_timeout=<sd_download_timeout>   : (int) 3
            --auto_renew_claim_height_delta=<auto_renew_claim_height_delta> : (int) 0
                claims set to expire within this many blocks will be
                automatically renewed after startup (if set to 0, renews
                will not be made automatically)

        Returns:
            (dict) Updated dictionary of daemon settings
        """

        # TODO: improve upon the current logic, it could be made better
        new_settings = kwargs

        setting_types = {
            'download_directory': str,
            'data_rate': float,
            'download_timeout': int,
            'peer_port': int,
            'max_key_fee': dict,
            'use_upnp': bool,
            'run_reflector_server': bool,
            'cache_time': int,
            'reflect_uploads': bool,
            'share_usage_data': bool,
            'disable_max_key_fee': bool,
            'peer_search_timeout': int,
            'sd_download_timeout': int,
            'auto_renew_claim_height_delta': int
        }
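
        # Each incoming value is coerced to its declared type before being
        # persisted: values already of the right type are stored as-is, JSON
        # strings are decoded for dict-typed settings, and anything else is
        # passed through the type constructor (e.g. int("180")).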
        for key, setting_type in setting_types.items():
            if key in new_settings:
                if isinstance(new_settings[key], setting_type):
                    conf.settings.update({key: new_settings[key]},
                                         data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
                elif setting_type is dict and isinstance(new_settings[key], str):
                    decoded = json.loads(str(new_settings[key]))
                    conf.settings.update({key: decoded},
                                         data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
                else:
                    converted = setting_type(new_settings[key])
                    conf.settings.update({key: converted},
                                         data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
        conf.settings.save_conf_file_settings()
        return conf.settings.get_adjustable_settings_dict()

    def jsonrpc_help(self, command=None):
        """
        Return a useful message for an API command

        Usage:
            help [<command> | --command=<command>]

        Options:
            --command=<command> : (str) command to retrieve documentation for

        Returns:
            (str) Help message
        """

        if command is None:
            return {
                'about': 'This is the LBRY JSON-RPC API',
                'command_help': 'Pass a `command` parameter to this method to see ' +
                                'help for that command (e.g. `help command=resolve_name`)',
                'command_list': 'Get a full list of commands using the `commands` method',
                'more_info': 'Visit https://lbry.io/api for more info',
            }

        fn = self.callable_methods.get(command)
        if fn is None:
            raise Exception(
                f"No help available for '{command}'. It is not a valid command."
            )

        return {
            'help': textwrap.dedent(fn.__doc__ or '')
        }

    def jsonrpc_commands(self):
        """
        Return a list of available commands

        Usage:
            commands

        Options:
            None

        Returns:
            (list) list of available commands
        """
        return sorted(self.callable_methods.keys())

    @deprecated("account_balance")
    def jsonrpc_wallet_balance(self, address=None):
        pass

    @deprecated("account_unlock")
    def jsonrpc_wallet_unlock(self, password):
        pass

    @deprecated("account_decrypt")
    def jsonrpc_wallet_decrypt(self):
        pass

    @deprecated("account_encrypt")
    def jsonrpc_wallet_encrypt(self, new_password):
        pass

    @deprecated("address_is_mine")
    def jsonrpc_wallet_is_address_mine(self, address):
        pass

    @deprecated()
    def jsonrpc_wallet_public_key(self, address):
        pass

    @deprecated("address_list")
    def jsonrpc_wallet_list(self):
        pass

    @deprecated("address_unused")
    def jsonrpc_wallet_new_address(self):
        pass

    @deprecated("address_unused")
    def jsonrpc_wallet_unused_address(self):
        pass
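
    # These wallet_* stubs exist only for backwards compatibility; the
    # @deprecated decorator points callers at the replacement account_*/
    # address_* command named in its argument (jsonrpc_wallet_public_key has
    # no direct replacement, hence the bare @deprecated()).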

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_wallet_send(self, amount, address=None, claim_id=None, account_id=None):
        """
        Send credits. If given an address, send credits to it. If given a claim id, send a tip
        to the owner of the claim specified by that claim id. A tip is a claim support where
        the recipient of the support is the claim address for the claim being supported.

        Usage:
            wallet_send (<amount> | --amount=<amount>)
                        ((<address> | --address=<address>) | (<claim_id> | --claim_id=<claim_id>))
                        [--account_id=<account_id>]

        Options:
            --amount=<amount>         : (decimal) amount of credit to send
            --address=<address>       : (str) address to send credits to
            --claim_id=<claim_id>     : (str) claim_id of the claim to send a tip to
            --account_id=<account_id> : (str) account to fund the transaction

        Returns:
            If sending to an address:
            (dict) Dictionary containing the transaction information
            {
                "hex": (str) raw transaction,
                "inputs": (list) inputs(dict) used for the transaction,
                "outputs": (list) outputs(dict) for the transaction,
                "total_fee": (int) fee in dewies,
                "total_input": (int) total of inputs in dewies,
                "total_output": (int) total of outputs in dewies (input - fees),
                "txid": (str) txid of the transaction,
            }

            If sending a claim tip:
            (dict) Dictionary containing the result of the support
            {
                txid : (str) txid of resulting support claim
                nout : (int) nout of the resulting support claim
                fee : (float) fee paid for the transaction
            }
        """

        amount = self.get_dewies_or_error("amount", amount)
        if not amount:
            raise NullFundsError()
        elif amount < 0:
            raise NegativeFundsError()

        if address and claim_id:
            raise Exception("Given both an address and a claim id")
        elif not address and not claim_id:
            raise Exception("Not given an address or a claim id")

        if address:
            # raises an error if the address is invalid
            decode_address(address)

            reserved_points = self.wallet_manager.reserve_points(address, amount)
            if reserved_points is None:
                raise InsufficientFundsError()
            account = self.get_account_or_default(account_id)
            result = await self.wallet_manager.send_points_to_address(reserved_points, amount, account)
            await self.analytics_manager.send_credits_sent()
        else:
            log.info("This command is deprecated for sending tips, please use the newer claim_tip command")
            result = await self.jsonrpc_claim_tip(claim_id=claim_id, amount=amount, account_id=account_id)
        return result
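
    # A request sketch with placeholder values (shape per the docstring):
    #   {"method": "wallet_send", "params": {"amount": "1.0", "address": "b..."}}
    # sends 1.0 LBC to an address, while passing "claim_id" instead of
    # "address" routes through jsonrpc_claim_tip as shown above.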

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    # @AuthJSONRPCServer.deprecated("account_fund"), API has changed as well, so we forward for now
    # marked as deprecated in changelog and will be removed after subsequent release
    def jsonrpc_wallet_prefill_addresses(self, num_addresses, amount, no_broadcast=False):
        """
        Create new UTXOs, each containing `amount` credits

        Usage:
            wallet_prefill_addresses [--no_broadcast]
                                     (<num_addresses> | --num_addresses=<num_addresses>)
                                     (<amount> | --amount=<amount>)

        Options:
            --no_broadcast                  : (bool) whether to broadcast or not
            --num_addresses=<num_addresses> : (int) num of addresses to create
            --amount=<amount>               : (decimal) initial amount in each address

        Returns:
            (dict) the resulting transaction
        """
        broadcast = not no_broadcast
        return self.jsonrpc_account_fund(
            self.default_account.id,
            self.default_account.id,
            amount=amount,
            outputs=num_addresses,
            broadcast=broadcast
        )

    @requires("wallet")
    def jsonrpc_account_list(self, account_id=None, confirmations=6,
                             include_claims=False, show_seed=False):
        """
        List details of all of the accounts or a specific account.

        Usage:
            account_list [<account_id>] [--confirmations=<confirmations>]
                         [--include_claims] [--show_seed]

        Options:
            --account_id=<account_id>       : (str) If provided only the balance for this
                                              account will be given
            --confirmations=<confirmations> : (int) required confirmations (default: 6)
            --include_claims                : (bool) include claims, requires that a
                                              LBC account is specified (default: false)
            --show_seed                     : (bool) show the seed for the account

        Returns:
            (map) details of account(s)
        """
        kwargs = {
            'confirmations': confirmations,
            'show_seed': show_seed
        }
        if account_id:
            return self.get_account_or_error(account_id).get_details(**kwargs)
        else:
            return self.wallet_manager.get_detailed_accounts(**kwargs)

    @requires("wallet")
    async def jsonrpc_account_balance(self, account_id=None, confirmations=0):
        """
        Return the balance of an account

        Usage:
            account_balance [<account_id>] [--confirmations=<confirmations>]

        Options:
            --account_id=<account_id>       : (str) If provided only the balance for this
                                              account will be given. Otherwise default account.
            --confirmations=<confirmations> : (int) Only include transactions with this many
                                              confirmed blocks.

        Returns:
            (decimal) amount of lbry credits in wallet
        """
        account = self.get_account_or_default(account_id)
        dewies = await account.get_balance(confirmations=confirmations)
        return dewies_to_lbc(dewies)
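
    # Balances are tracked internally in dewies, the smallest LBC unit;
    # dewies_to_lbc converts to the decimal string returned by the API.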

    @requires("wallet")
    async def jsonrpc_account_add(
            self, account_name, single_key=False, seed=None, private_key=None, public_key=None):
        """
        Add a previously created account from a seed, private key or public key (read-only).
        Specify --single_key for single address or vanity address accounts.

        Usage:
            account_add (<account_name> | --account_name=<account_name>)
                        (--seed=<seed> | --private_key=<private_key> | --public_key=<public_key>)
                        [--single_key]

        Options:
            --account_name=<account_name> : (str) name of the account to add
            --seed=<seed>                 : (str) seed to generate new account from
            --private_key=<private_key>   : (str) private key for new account
            --public_key=<public_key>     : (str) public key for new account
            --single_key                  : (bool) create single key account, default is multi-key

        Returns:
            (map) added account details
        """
        account = LBCAccount.from_dict(
            self.ledger, self.default_wallet, {
                'name': account_name,
                'seed': seed,
                'private_key': private_key,
                'public_key': public_key,
                'address_generator': {
                    'name': SingleKey.name if single_key else HierarchicalDeterministic.name
                }
            }
        )

        if self.ledger.network.is_connected:
            await self.ledger.subscribe_account(account)

        self.default_wallet.save()

        result = account.to_dict()
        result['id'] = account.id
        result['status'] = 'added'
        result.pop('certificates', None)
        result['is_default'] = self.default_wallet.accounts[0] == account
        return result
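
    # The address_generator name chosen above (SingleKey vs
    # HierarchicalDeterministic) is fixed when the account is created and
    # determines how its receiving and change addresses are derived.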

    @requires("wallet")
    async def jsonrpc_account_create(self, account_name, single_key=False):
        """
        Create a new account. Specify --single_key if you want to use
        the same address for all transactions (not recommended).

        Usage:
            account_create (<account_name> | --account_name=<account_name>) [--single_key]

        Options:
            --account_name=<account_name> : (str) name of the account to create
            --single_key                  : (bool) create single key account, default is multi-key

        Returns:
            (map) new account details
        """
        account = LBCAccount.generate(
            self.ledger, self.default_wallet, account_name, {
                'name': SingleKey.name if single_key else HierarchicalDeterministic.name
            }
        )

        if self.ledger.network.is_connected:
            await self.ledger.subscribe_account(account)

        self.default_wallet.save()

        result = account.to_dict()
        result['id'] = account.id
        result['status'] = 'created'
        result.pop('certificates', None)
        result['is_default'] = self.default_wallet.accounts[0] == account
        return result

    @requires("wallet")
    def jsonrpc_account_remove(self, account_id):
        """
        Remove an existing account.

        Usage:
            account_remove (<account_id> | --account_id=<account_id>)

        Options:
            --account_id=<account_id> : (str) id of the account to remove

        Returns:
            (map) details of removed account
        """
        account = self.get_account_or_error(account_id)
        self.default_wallet.accounts.remove(account)
        self.default_wallet.save()
        result = account.to_dict()
        result['id'] = account.id
        result['status'] = 'removed'
        result.pop('certificates', None)
        return result

    @requires("wallet")
    def jsonrpc_account_set(
            self, account_id, default=False, new_name=None,
            change_gap=None, change_max_uses=None, receiving_gap=None, receiving_max_uses=None):
        """
        Change various settings on an account.

        Usage:
            account_set (<account_id> | --account_id=<account_id>)
                        [--default] [--new_name=<new_name>]
                        [--change_gap=<change_gap>] [--change_max_uses=<change_max_uses>]
                        [--receiving_gap=<receiving_gap>] [--receiving_max_uses=<receiving_max_uses>]

        Options:
            --account_id=<account_id>                 : (str) id of the account to change
            --default                                 : (bool) make this account the default
            --new_name=<new_name>                     : (str) new name for the account
            --receiving_gap=<receiving_gap>           : (int) set the gap for receiving addresses
            --receiving_max_uses=<receiving_max_uses> : (int) set the maximum number of times to
                                                        use a receiving address
            --change_gap=<change_gap>                 : (int) set the gap for change addresses
            --change_max_uses=<change_max_uses>       : (int) set the maximum number of times to
                                                        use a change address

        Returns:
            (map) updated account details
        """
        account = self.get_account_or_error(account_id)
        change_made = False
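
        # Gap and max-use settings only apply to deterministic accounts;
        # single-key accounts have no address chains to configure, so the
        # branch below is skipped for them.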
        if account.receiving.name == HierarchicalDeterministic.name:
            address_changes = {
                'change': {'gap': change_gap, 'maximum_uses_per_address': change_max_uses},
                'receiving': {'gap': receiving_gap, 'maximum_uses_per_address': receiving_max_uses},
            }
            for chain_name in address_changes:
                chain = getattr(account, chain_name)
                for attr, value in address_changes[chain_name].items():
                    if value is not None:
                        setattr(chain, attr, value)
                        change_made = True

        if new_name is not None:
            account.name = new_name
            change_made = True

        if default:
            self.default_wallet.accounts.remove(account)
            self.default_wallet.accounts.insert(0, account)
            change_made = True

        if change_made:
            self.default_wallet.save()

        result = account.to_dict()
        result['id'] = account.id
        result.pop('certificates', None)
        result['is_default'] = self.default_wallet.accounts[0] == account
        return result

    @requires(WALLET_COMPONENT)
    def jsonrpc_account_unlock(self, password, account_id=None):
        """
        Unlock an encrypted account

        Usage:
            account_unlock (<password> | --password=<password>) [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id> : (str) id for the account to unlock

        Returns:
            (bool) true if account is unlocked, otherwise false
        """

        return self.wallet_manager.unlock_account(
            password, self.get_account_or_default(account_id, lbc_only=False)
        )

    @requires(WALLET_COMPONENT)
    def jsonrpc_account_lock(self, account_id=None):
        """
        Lock an unlocked account

        Usage:
            account_lock [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id> : (str) id for the account to lock

        Returns:
            (bool) true if account is locked, otherwise false
        """

        return self.wallet_manager.lock_account(self.get_account_or_default(account_id, lbc_only=False))

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    def jsonrpc_account_decrypt(self, account_id=None):
        """
        Decrypt an encrypted account; this will remove the wallet password

        Usage:
            account_decrypt [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id> : (str) id for the account to decrypt

        Returns:
            (bool) true if wallet is decrypted, otherwise false
        """

        return self.wallet_manager.decrypt_account(self.get_account_or_default(account_id, lbc_only=False))

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    def jsonrpc_account_encrypt(self, new_password, account_id=None):
        """
        Encrypt an unencrypted account with a password

        Usage:
            account_encrypt (<new_password> | --new_password=<new_password>) [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id> : (str) id for the account to encrypt

        Returns:
            (bool) true if wallet is encrypted, otherwise false
        """

        return self.wallet_manager.encrypt_account(
            new_password,
            self.get_account_or_default(account_id, lbc_only=False)
        )

    @requires("wallet")
    def jsonrpc_account_max_address_gap(self, account_id):
        """
        Finds ranges of consecutive addresses that are unused and returns the length
        of the longest such range, for both the change and receiving address chains.
        This is useful to figure out ideal values to set for the 'receiving_gap' and
        'change_gap' account settings.

        Usage:
            account_max_address_gap (<account_id> | --account_id=<account_id>)

        Options:
            --account_id=<account_id> : (str) account for which to get max gaps

        Returns:
            (map) maximum gap for change and receiving addresses
        """
        return self.get_account_or_error(account_id).get_max_gap()

    @requires("wallet")
    def jsonrpc_account_fund(self, to_account=None, from_account=None, amount='0.0',
                             everything=False, outputs=1, broadcast=False):
        """
        Transfer some amount (or --everything) to an account from another
        account (can be the same account). Amounts are interpreted as LBC.
        You can also spread the transfer across a number of --outputs (cannot
        be used together with --everything).

        Usage:
            account_fund [<to_account> | --to_account=<to_account>]
                         [<from_account> | --from_account=<from_account>]
                         (<amount> | --amount=<amount> | --everything)
                         [<outputs> | --outputs=<outputs>]
                         [--broadcast]

        Options:
            --to_account=<to_account>     : (str) send to this account
            --from_account=<from_account> : (str) spend from this account
            --amount=<amount>             : (str) the amount of lbc to transfer
            --everything                  : (bool) transfer everything (excluding claims), default: false.
            --outputs=<outputs>           : (int) split payment across many outputs, default: 1.
            --broadcast                   : (bool) actually broadcast the transaction, default: false.

        Returns:
            (map) transaction performing requested action
        """
        to_account = self.get_account_or_default(to_account, 'to_account')
        from_account = self.get_account_or_default(from_account, 'from_account')
        amount = self.get_dewies_or_error('amount', amount) if amount else None
        if not isinstance(outputs, int):
            raise ValueError("--outputs must be an integer.")
        if everything and outputs > 1:
            raise ValueError("Using --everything along with --outputs is not supported.")
        return from_account.fund(
            to_account=to_account, amount=amount, everything=everything,
            outputs=outputs, broadcast=broadcast
        )
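
    # Usage sketch per the docstring above: splitting 10 LBC into 5 UTXOs
    # within the default account and broadcasting the result:
    #   account_fund --amount="10.0" --outputs=5 --broadcast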

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_account_send(self, amount, addresses, account_id=None, broadcast=False):
        """
        Send the same number of credits to multiple addresses.

        Usage:
            account_send <amount> <addresses>... [--account_id=<account_id>] [--broadcast]

        Options:
            --account_id=<account_id> : (str) account to fund the transaction
            --broadcast               : (bool) actually broadcast the transaction, default: false.

        Returns:
        """

        amount = self.get_dewies_or_error("amount", amount)
        if not amount:
            raise NullFundsError()
        elif amount < 0:
            raise NegativeFundsError()

        for address in addresses:
            decode_address(address)

        account = self.get_account_or_default(account_id)
        result = await account.send_to_addresses(amount, addresses, broadcast)
        await self.analytics_manager.send_credits_sent()
        return result

    @requires(WALLET_COMPONENT)
    def jsonrpc_address_is_mine(self, address, account_id=None):
        """
        Checks if an address is associated with the current wallet.

        Usage:
            address_is_mine (<address> | --address=<address>)
                            [<account_id> | --account_id=<account_id>]

        Options:
            --address=<address>       : (str) address to check
            --account_id=<account_id> : (str) id of the account to use

        Returns:
            (bool) true, if address is associated with current wallet
        """
        return self.wallet_manager.address_is_mine(
            address, self.get_account_or_default(account_id)
        )

    @requires(WALLET_COMPONENT)
    def jsonrpc_address_list(self, account_id=None, page=None, page_size=None):
        """
        List account addresses

        Usage:
            address_list [<account_id> | --account_id=<account_id>]
                         [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id> : (str) id of the account to use
            --page=<page>             : (int) page to return during pagination
            --page_size=<page_size>   : (int) number of items on page during pagination

        Returns:
            List of wallet addresses
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_addresses,
            account.get_address_count,
            page, page_size
        )
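
    # maybe_paginate (a helper defined earlier in this module) is handed both
    # the item getter and the count getter; as its name suggests, it returns
    # the full list when no page is requested and a page-sized slice otherwise.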

    @requires(WALLET_COMPONENT)
    def jsonrpc_address_unused(self, account_id=None):
        """
        Return an address containing no balance; a new address will be
        created if there is none.

        Usage:
            address_unused [--account_id=<account_id>]

        Options:
            --account_id=<account_id> : (str) id of the account to use

        Returns:
            (str) Unused wallet address in base58
        """
        return self.get_account_or_default(account_id).receiving.get_or_create_usable_address()

    @requires(FILE_MANAGER_COMPONENT)
    async def jsonrpc_file_list(self, sort=None, **kwargs):
        """
        List files limited by optional filters

        Usage:
            file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
                      [--rowid=<rowid>] [--claim_id=<claim_id>] [--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>]
                      [--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>]
                      [--claim_name=<claim_name>] [--sort=<sort_method>...]

        Options:
            --sd_hash=<sd_hash>                   : (str) get file with matching sd hash
            --file_name=<file_name>               : (str) get file with matching file name in the
                                                    downloads folder
            --stream_hash=<stream_hash>           : (str) get file with matching stream hash
            --rowid=<rowid>                       : (int) get file with matching row id
            --claim_id=<claim_id>                 : (str) get file with matching claim id
            --outpoint=<outpoint>                 : (str) get file with matching claim outpoint
            --txid=<txid>                         : (str) get file with matching claim txid
            --nout=<nout>                         : (int) get file with matching claim nout
            --channel_claim_id=<channel_claim_id> : (str) get file with matching channel claim id
            --channel_name=<channel_name>         : (str) get file with matching channel name
            --claim_name=<claim_name>             : (str) get file with matching claim name
            --sort=<sort_method>                  : (str) sort by any property, like 'file_name'
                                                    or 'metadata.author'; to specify direction
                                                    append ',asc' or ',desc'

        Returns:
            (list) List of files

            [
                {
                    'completed': (bool) true if download is completed,
                    'file_name': (str) name of file,
                    'download_directory': (str) download directory,
                    'points_paid': (float) credit paid to download file,
                    'stopped': (bool) true if download is stopped,
                    'stream_hash': (str) stream hash of file,
                    'stream_name': (str) stream name,
                    'suggested_file_name': (str) suggested file name,
                    'sd_hash': (str) sd hash of file,
                    'download_path': (str) download path of file,
                    'mime_type': (str) mime type of file,
                    'key': (str) key attached to file,
                    'total_bytes': (int) file size in bytes,
                    'written_bytes': (int) written size in bytes,
                    'blobs_completed': (int) number of fully downloaded blobs,
                    'blobs_in_stream': (int) total blobs on stream,
                    'status': (str) downloader status,
                    'claim_id': (str) None if claim is not found else the claim id,
                    'outpoint': (str) None if claim is not found else the tx and output,
                    'txid': (str) None if claim is not found else the transaction id,
                    'nout': (int) None if claim is not found else the transaction output index,
                    'metadata': (dict) None if claim is not found else the claim metadata,
                    'channel_claim_id': (str) None if claim is not found or not signed,
                    'channel_name': (str) None if claim is not found or not signed,
                    'claim_name': (str) None if claim is not found else the claim name
                },
            ]
        """
        result = await self._get_lbry_files(return_json=True, **kwargs)
        if sort:
            sort_by = [self._parse_lbry_files_sort(s) for s in sort]
            result = self._sort_lbry_files(result, sort_by)
        return result
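
    # Example sort specs accepted by the docstring above:
    #   --sort=file_name,asc --sort=metadata.author
    # Each spec is parsed by _parse_lbry_files_sort before sorting.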

    @requires(WALLET_COMPONENT)
    async def jsonrpc_resolve_name(self, name, force=False):
        """
        Resolve stream info from a LBRY name

        Usage:
            resolve_name (<name> | --name=<name>) [--force]

        Options:
            --name=<name> : (str) the name to resolve
            --force       : (bool) force refresh and do not check cache

        Returns:
            (dict) Metadata dictionary from name claim, None if the name is not
            resolvable
        """

        try:
            name = parse_lbry_uri(name).name
            metadata = await self.wallet_manager.resolve(name, check_cache=not force)
            if name in metadata:
                metadata = metadata[name]
            return metadata
        except UnknownNameError:
            log.info('Name %s is not known', name)

    @requires(WALLET_COMPONENT)
    async def jsonrpc_claim_show(self, txid=None, nout=None, claim_id=None):
        """
        Resolve claim info from txid/nout or with claim ID

        Usage:
            claim_show [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                       [<claim_id> | --claim_id=<claim_id>]

        Options:
            --txid=<txid>         : (str) look for claim with this txid, nout must
                                    also be specified
            --nout=<nout>         : (int) look for claim with this nout, txid must
                                    also be specified
            --claim_id=<claim_id> : (str) look for claim with this claim id

        Returns:
            (dict) Dictionary containing claim info as below,

            {
                'txid': (str) txid of claim
                'nout': (int) nout of claim
                'amount': (float) amount of claim
                'value': (str) value of claim
                'height' : (int) height of claim takeover
                'claim_id': (str) claim ID of claim
                'supports': (list) list of supports associated with claim
            }

            if claim cannot be resolved, dictionary as below will be returned

            {
                'error': (str) reason for error
            }

        """
        if claim_id is not None and txid is None and nout is None:
            claim_results = await self.wallet_manager.get_claim_by_claim_id(claim_id)
        elif txid is not None and nout is not None and claim_id is None:
            claim_results = await self.wallet_manager.get_claim_by_outpoint(txid, int(nout))
        else:
            raise Exception("Must specify either txid/nout, or claim_id")
        return claim_results

    @requires(WALLET_COMPONENT)
    async def jsonrpc_resolve(self, force=False, uri=None, uris=None):
        """
        Resolve given LBRY URIs

        Usage:
            resolve [--force] (<uri> | --uri=<uri>) [<uris>...]

        Options:
            --force       : (bool) force refresh and ignore cache
            --uri=<uri>   : (str) uri to resolve
            --uris=<uris> : (list) uris to resolve

        Returns:
            Dictionary of results, keyed by uri
            '<uri>': {
                If a resolution error occurs:
                'error': Error message

                If the uri resolves to a channel or a claim in a channel:
                'certificate': {
                    'address': (str) claim address,
                    'amount': (float) claim amount,
                    'effective_amount': (float) claim amount including supports,
                    'claim_id': (str) claim id,
                    'claim_sequence': (int) claim sequence number,
                    'decoded_claim': (bool) whether or not the claim value was decoded,
                    'height': (int) claim height,
                    'depth': (int) claim depth,
                    'has_signature': (bool) included if decoded_claim
                    'name': (str) claim name,
                    'permanent_url': (str) permanent url of the certificate claim,
                    'supports': (list) list of supports [{'txid': (str) txid,
                                                          'nout': (int) nout,
                                                          'amount': (float) amount}],
                    'txid': (str) claim txid,
                    'nout': (str) claim nout,
                    'signature_is_valid': (bool), included if has_signature,
                    'value': ClaimDict if decoded, otherwise hex string
                }

                If the uri resolves to a channel:
                'claims_in_channel': (int) number of claims in the channel,

                If the uri resolves to a claim:
                'claim': {
                    'address': (str) claim address,
                    'amount': (float) claim amount,
                    'effective_amount': (float) claim amount including supports,
                    'claim_id': (str) claim id,
                    'claim_sequence': (int) claim sequence number,
                    'decoded_claim': (bool) whether or not the claim value was decoded,
                    'height': (int) claim height,
                    'depth': (int) claim depth,
                    'has_signature': (bool) included if decoded_claim
                    'name': (str) claim name,
                    'permanent_url': (str) permanent url of the claim,
                    'channel_name': (str) channel name if claim is in a channel
                    'supports': (list) list of supports [{'txid': (str) txid,
                                                          'nout': (int) nout,
                                                          'amount': (float) amount}]
                    'txid': (str) claim txid,
                    'nout': (str) claim nout,
                    'signature_is_valid': (bool), included if has_signature,
                    'value': ClaimDict if decoded, otherwise hex string
                }
            }
        """

        uris = tuple(uris or [])
        if uri is not None:
            uris += (uri,)

        results = {}
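
        # Validate every uri up front: ones that fail parse_lbry_uri are
        # reported inline in the result map instead of failing the whole batch.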
        valid_uris = tuple()
        for u in uris:
            try:
                parse_lbry_uri(u)
                valid_uris += (u,)
            except URIParseError:
                results[u] = {"error": "%s is not a valid uri" % u}

        resolved = await self.wallet_manager.resolve(*valid_uris, check_cache=not force)
        for resolved_uri in resolved:
            results[resolved_uri] = resolved[resolved_uri]
        return results

    @requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
              RATE_LIMITER_COMPONENT, PAYMENT_RATE_COMPONENT, DATABASE_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_get(self, uri, file_name=None, timeout=None):
        """
        Download stream from a LBRY name.

        Usage:
            get <uri> [<file_name> | --file_name=<file_name>] [<timeout> | --timeout=<timeout>]

        Options:
            --uri=<uri>             : (str) uri of the content to download
            --file_name=<file_name> : (str) specified name for the downloaded file
            --timeout=<timeout>     : (int) download timeout in number of seconds

        Returns:
            (dict) Dictionary containing information about the stream
            {
                'completed': (bool) true if download is completed,
                'file_name': (str) name of file,
                'download_directory': (str) download directory,
                'points_paid': (float) credit paid to download file,
                'stopped': (bool) true if download is stopped,
                'stream_hash': (str) stream hash of file,
                'stream_name': (str) stream name,
                'suggested_file_name': (str) suggested file name,
                'sd_hash': (str) sd hash of file,
                'download_path': (str) download path of file,
                'mime_type': (str) mime type of file,
                'key': (str) key attached to file,
                'total_bytes': (int) file size in bytes,
                'written_bytes': (int) written size in bytes,
                'blobs_completed': (int) number of fully downloaded blobs,
                'blobs_in_stream': (int) total blobs on stream,
                'status': (str) downloader status,
                'claim_id': (str) claim id,
                'outpoint': (str) claim outpoint string,
                'txid': (str) claim txid,
                'nout': (int) claim nout,
                'metadata': (dict) claim metadata,
                'channel_claim_id': (str) None if claim is not signed
                'channel_name': (str) None if claim is not signed
                'claim_name': (str) claim name
            }
        """

        timeout = timeout if timeout is not None else conf.settings['download_timeout']

        parsed_uri = parse_lbry_uri(uri)
        if parsed_uri.is_channel:
            raise Exception("cannot download a channel claim, specify a /path")

        resolved = (await self.wallet_manager.resolve(uri)).get(uri, {})
        resolved = resolved if 'value' in resolved else resolved.get('claim')

        if not resolved:
            raise ResolveError(
                "Failed to resolve stream at lbry://{}".format(uri.replace("lbry://", ""))
            )
        if 'error' in resolved:
            raise ResolveError(f"error resolving stream: {resolved['error']}")
        txid, nout, name = resolved['txid'], resolved['nout'], resolved['name']
        claim_dict = ClaimDict.load_dict(resolved['value'])
        sd_hash = claim_dict.source_hash.decode()
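
        # self.streams tracks in-flight downloads keyed by sd hash, so a second
        # `get` for the same stream waits on the existing downloader rather
        # than starting a duplicate download.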
        if sd_hash in self.streams:
            log.info("Already waiting on lbry://%s to start downloading", name)
            await d2f(self.streams[sd_hash].data_downloading_deferred)

        lbry_file = await self._get_lbry_file(FileID.SD_HASH, sd_hash, return_json=False)

        if lbry_file:
            if not os.path.isfile(os.path.join(lbry_file.download_directory, lbry_file.file_name)):
                log.info("Already have lbry file but missing file in %s, rebuilding it",
                         lbry_file.download_directory)
                await d2f(lbry_file.start())
            else:
                log.info('Already have a file for %s', name)
            result = await self._get_lbry_file_dict(lbry_file)
        else:
            result = await self._download_name(name, claim_dict, sd_hash, txid, nout,
                                               timeout=timeout, file_name=file_name)
        return result

    @requires(FILE_MANAGER_COMPONENT)
    async def jsonrpc_file_set_status(self, status, **kwargs):
        """
        Start or stop downloading a file

        Usage:
            file_set_status (<status> | --status=<status>) [--sd_hash=<sd_hash>]
                            [--file_name=<file_name>] [--stream_hash=<stream_hash>] [--rowid=<rowid>]

        Options:
            --status=<status>           : (str) one of "start" or "stop"
            --sd_hash=<sd_hash>         : (str) set status of file with matching sd hash
            --file_name=<file_name>     : (str) set status of file with matching file name in the
                                          downloads folder
            --stream_hash=<stream_hash> : (str) set status of file with matching stream hash
            --rowid=<rowid>             : (int) set status of file with matching row id

        Returns:
            (str) Confirmation message
        """

        if status not in ['start', 'stop']:
            raise Exception('Status must be "start" or "stop".')

        search_type, value = get_lbry_file_search_value(kwargs)
        lbry_file = await self._get_lbry_file(search_type, value, return_json=False)
        if not lbry_file:
            raise Exception(f'Unable to find a file for {search_type}:{value}')

        if (status == 'start' and lbry_file.stopped) or (status == 'stop' and not lbry_file.stopped):
            await d2f(self.file_manager.toggle_lbry_file_running(lbry_file))
            msg = "Started downloading file" if status == 'start' else "Stopped downloading file"
        else:
            msg = (
                "File was already being downloaded" if status == 'start'
                else "File was already stopped"
            )
        return msg

    @requires(FILE_MANAGER_COMPONENT)
    async def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs):
        """
        Delete a LBRY file

        Usage:
            file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                        [--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--txid=<txid>]
                        [--nout=<nout>] [--claim_name=<claim_name>] [--channel_claim_id=<channel_claim_id>]
                        [--channel_name=<channel_name>]

        Options:
            --delete_from_download_dir            : (bool) delete file from download directory,
                                                    instead of just deleting blobs
            --delete_all                          : (bool) if there are multiple matching files,
                                                    allow the deletion of multiple files.
                                                    Otherwise do not delete anything.
            --sd_hash=<sd_hash>                   : (str) delete by file sd hash
            --file_name=<file_name>               : (str) delete by file name in downloads folder
            --stream_hash=<stream_hash>           : (str) delete by file stream hash
            --rowid=<rowid>                       : (int) delete by file row id
            --claim_id=<claim_id>                 : (str) delete by file claim id
            --txid=<txid>                         : (str) delete by file claim txid
            --nout=<nout>                         : (int) delete by file claim nout
            --claim_name=<claim_name>             : (str) delete by file claim name
            --channel_claim_id=<channel_claim_id> : (str) delete by file channel claim id
            --channel_name=<channel_name>         : (str) delete by file channel claim name

        Returns:
            (bool) true if deletion was successful
        """

        lbry_files = await self._get_lbry_files(return_json=False, **kwargs)

        if len(lbry_files) > 1:
            if not delete_all:
                log.warning("There are %i files to delete, use narrower filters to select one",
                            len(lbry_files))
                return False
            else:
                log.warning("Deleting %i files", len(lbry_files))

        if not lbry_files:
            log.warning("There is no file to delete")
            return False
        else:
            for lbry_file in lbry_files:
                file_name, stream_hash = lbry_file.file_name, lbry_file.stream_hash
                if lbry_file.sd_hash in self.streams:
                    del self.streams[lbry_file.sd_hash]
                await d2f(self.file_manager.delete_lbry_file(lbry_file, delete_file=delete_from_download_dir))
                log.info("Deleted file: %s", file_name)
            return True

    @requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
              DHT_COMPONENT, RATE_LIMITER_COMPONENT, PAYMENT_RATE_COMPONENT, DATABASE_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    def jsonrpc_stream_cost_estimate(self, uri, size=None):
        """
        Get estimated cost for a lbry stream

        Usage:
            stream_cost_estimate (<uri> | --uri=<uri>) [<size> | --size=<size>]

        Options:
            --uri=<uri>    : (str) uri to use
            --size=<size>  : (float) stream size in bytes. If provided, an sd blob
                             won't be downloaded.

        Returns:
            (float) Estimated cost in lbry credits, returns None if uri is not
                resolvable
        """
        return self.get_est_cost(uri, size)
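
    # Hedged client-side sketch (assuming the daemon's default JSON-RPC address
    # of localhost:5279; the uri is illustrative). Passing a size avoids
    # downloading the sd blob before estimating:
    #
    #   import requests
    #   requests.post('http://localhost:5279', json={
    #       'method': 'stream_cost_estimate',
    #       'params': {'uri': 'lbry://example', 'size': 1048576}
    #   }).json()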

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_channel_new(self, channel_name, amount, account_id=None):
        """
        Generate a publisher key and create a new '@' prefixed certificate claim

        Usage:
            channel_new (<channel_name> | --channel_name=<channel_name>)
                        (<amount> | --amount=<amount>)
                        [--account_id=<account_id>]

        Options:
            --channel_name=<channel_name>  : (str) name of the channel prefixed with '@'
            --amount=<amount>              : (decimal) bid amount on the channel
            --account_id=<account_id>      : (str) id of the account to store channel

        Returns:
            (dict) Dictionary containing result of the claim
            {
                'tx' : (str) hex encoded transaction
                'txid' : (str) txid of resulting claim
                'nout' : (int) nout of the resulting claim
                'fee' : (float) fee paid for the claim transaction
                'claim_id' : (str) claim ID of the resulting claim
            }
        """
        try:
            parsed = parse_lbry_uri(channel_name)
            if not parsed.contains_channel:
                raise Exception("Cannot make a new channel for a non channel name")
            if parsed.path:
                raise Exception("Invalid channel uri")
        except (TypeError, URIParseError):
            raise Exception("Invalid channel name")

        amount = self.get_dewies_or_error("amount", amount)
        if amount <= 0:
            raise Exception("Invalid amount")

        tx = await self.wallet_manager.claim_new_channel(
            channel_name, amount, self.get_account_or_default(account_id)
        )
        self.default_wallet.save()
        await self.analytics_manager.send_new_channel()
        nout = 0
        txo = tx.outputs[nout]
        log.info("Claimed a new channel! lbry://%s txid: %s nout: %d", channel_name, tx.id, nout)
        return {
            "success": True,
            "tx": tx,
            "claim_id": txo.claim_id,
            "claim_address": txo.get_address(self.ledger),
            "output": txo
        }
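
    # Sketch of the amount handling above: bids are given in LBC but validated
    # and carried internally as dewies (1 LBC == 10**8 dewies), so e.g.
    # lbc_to_dewies('0.01') -> 1000000, and anything <= 0 is rejected before
    # the claim transaction is built.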

    @requires(WALLET_COMPONENT)
    def jsonrpc_channel_list(self, account_id=None, page=None, page_size=None):
        """
        Get certificate claim infos for channels that can be published to

        Usage:
            channel_list [<account_id> | --account_id=<account_id>]
                         [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id>  : (str) id of the account to use
            --page=<page>              : (int) page to return during paginating
            --page_size=<page_size>    : (int) number of items on page during pagination

        Returns:
            (list) ClaimDict, includes 'is_mine' field to indicate if the certificate claim
            is in the wallet.
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_channels,
            account.get_channel_count,
            page, page_size
        )
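
    # maybe_paginate (assumed to be a helper defined elsewhere in this module)
    # either returns the getter's full result, or, when page/page_size are
    # given, one page of results plus the total from the count callable.
    # Hedged sketch of the call pattern:
    #
    #   maybe_paginate(account.get_channels, account.get_channel_count,
    #                  page=1, page_size=10)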

    @requires(WALLET_COMPONENT)
    async def jsonrpc_channel_export(self, claim_id):
        """
        Export serialized channel signing information for a given certificate claim id

        Usage:
            channel_export (<claim_id> | --claim_id=<claim_id>)

        Options:
            --claim_id=<claim_id>  : (str) Claim ID to export information about

        Returns:
            (str) Serialized certificate information
        """

        return await self.wallet_manager.export_certificate_info(claim_id)

    @requires(WALLET_COMPONENT)
    async def jsonrpc_channel_import(self, serialized_certificate_info):
        """
        Import serialized channel signing information (to allow signing new claims to the channel)

        Usage:
            channel_import (<serialized_certificate_info> | --serialized_certificate_info=<serialized_certificate_info>)

        Options:
            --serialized_certificate_info=<serialized_certificate_info> : (str) certificate info

        Returns:
            (dict) Result dictionary
        """

        return await self.wallet_manager.import_certificate_info(serialized_certificate_info)
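
    # These two endpoints are intended to round-trip: the string produced by
    # channel_export can be fed to channel_import on another wallet so that
    # wallet can sign new claims into the same channel. A hedged sketch
    # (daemon objects here are illustrative):
    #
    #   info = await daemon.jsonrpc_channel_export(claim_id)
    #   await other_daemon.jsonrpc_channel_import(info)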

    @requires(WALLET_COMPONENT, FILE_MANAGER_COMPONENT, BLOB_COMPONENT, PAYMENT_RATE_COMPONENT, DATABASE_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_publish(
            self, name, bid, metadata=None, file_path=None, fee=None, title=None,
            description=None, author=None, language=None, license=None,
            license_url=None, thumbnail=None, preview=None, nsfw=None, sources=None,
            channel_name=None, channel_id=None, channel_account_id=None, account_id=None,
            claim_address=None, change_address=None):
        """
        Make a new name claim and publish associated data to lbrynet,
        update over existing claim if user already has a claim for name.

        Fields required in the final Metadata are:
            'title'
            'description'
            'author'
            'language'
            'license'
            'nsfw'

        Metadata can be set by either using the metadata argument or by setting individual arguments
        fee, title, description, author, language, license, license_url, thumbnail, preview, nsfw,
        or sources. Individual arguments will overwrite the fields specified in metadata argument.

        Usage:
            publish (<name> | --name=<name>) (<bid> | --bid=<bid>) [--metadata=<metadata>]
                    [--file_path=<file_path>] [--fee=<fee>] [--title=<title>]
                    [--description=<description>] [--author=<author>] [--language=<language>]
                    [--license=<license>] [--license_url=<license_url>] [--thumbnail=<thumbnail>]
                    [--preview=<preview>] [--nsfw=<nsfw>] [--sources=<sources>]
                    [--channel_name=<channel_name>] [--channel_id=<channel_id>]
                    [--channel_account_id=<channel_account_id>...] [--account_id=<account_id>]
                    [--claim_address=<claim_address>] [--change_address=<change_address>]

        Options:
            --name=<name>                    : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
            --bid=<bid>                      : (decimal) amount to back the claim
            --metadata=<metadata>            : (dict) ClaimDict to associate with the claim.
            --file_path=<file_path>          : (str) path to file to be associated with name. If provided,
                                               a lbry stream of this file will be used in 'sources'.
                                               If no path is given but a sources dict is provided,
                                               it will be used. If neither are provided, an
                                               error is raised.
            --fee=<fee>                      : (dict) Dictionary representing key fee to download content:
                                                {
                                                  'currency': currency_symbol,
                                                  'amount': decimal,
                                                  'address': str, optional
                                                }
                                               supported currencies: LBC, USD, BTC
                                               If an address is not provided a new one will be
                                               automatically generated. Default fee is zero.
            --title=<title>                  : (str) title of the publication
            --description=<description>      : (str) description of the publication
            --author=<author>                : (str) author of the publication. The usage for this field is not
                                               the same as for channels. The author field is used to credit an author
                                               who is not the publisher and is not represented by the channel. For
                                               example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
                                               be published to a channel such as '@classics', or to no channel at all
            --language=<language>            : (str) language of the publication
            --license=<license>              : (str) publication license
            --license_url=<license_url>      : (str) publication license url
            --thumbnail=<thumbnail>          : (str) thumbnail url
            --preview=<preview>              : (str) preview url
            --nsfw=<nsfw>                    : (bool) whether the content is nsfw
            --sources=<sources>              : (str) {'lbry_sd_hash': sd_hash} specifies sd hash of file
            --channel_name=<channel_name>    : (str) name of the publisher channel name in the wallet
            --channel_id=<channel_id>        : (str) claim id of the publisher channel, does not check
                                               for channel claim being in the wallet. This allows
                                               publishing to a channel where only the certificate
                                               private key is in the wallet.
            --channel_account_id=<channel_account_id> : (str) one or more account ids for accounts to look in
                                               for channel certificates, defaults to all accounts.
            --account_id=<account_id>        : (str) account to use for funding the transaction
            --claim_address=<claim_address>  : (str) address where the claim is sent to, if not specified
                                               a new address will automatically be created

        Returns:
            (dict) Dictionary containing result of the claim
            {
                'tx' : (str) hex encoded transaction
                'txid' : (str) txid of resulting claim
                'nout' : (int) nout of the resulting claim
                'fee' : (decimal) fee paid for the claim transaction
                'claim_id' : (str) claim ID of the resulting claim
            }
        """
        try:
            parse_lbry_uri(name)
        except (TypeError, URIParseError):
            raise Exception("Invalid name given to publish")

        amount = self.get_dewies_or_error('bid', bid)
        if amount <= 0:
            raise ValueError("Bid value must be greater than 0.0")

        for address in [claim_address, change_address]:
            if address is not None:
                # raises an error if the address is invalid
                decode_address(address)

        account = self.get_account_or_default(account_id)

        available = await account.get_balance()
        if amount >= available:
            existing_claims = await account.get_claims(claim_name=name)
            if len(existing_claims) == 1:
                available += existing_claims[0].get_estimator(self.ledger).effective_amount
            if amount >= available:
                raise InsufficientFundsError(
                    f"Please lower the bid value, the maximum amount "
                    f"you can specify for this claim is {dewies_to_lbc(available)}."
                )

        metadata = metadata or {}
        if fee is not None:
            metadata['fee'] = fee
        if title is not None:
            metadata['title'] = title
        if description is not None:
            metadata['description'] = description
        if author is not None:
            metadata['author'] = author
        if language is not None:
            metadata['language'] = language
        if license is not None:
            metadata['license'] = license
        if license_url is not None:
            metadata['licenseUrl'] = license_url
        if thumbnail is not None:
            metadata['thumbnail'] = thumbnail
        if preview is not None:
            metadata['preview'] = preview
        if nsfw is not None:
            metadata['nsfw'] = bool(nsfw)

        metadata['version'] = '_0_1_0'

        # check for original deprecated format {'currency':{'address','amount'}}
        # add address, version to fee if unspecified
        if 'fee' in metadata:
            if len(metadata['fee'].keys()) == 1 and isinstance(list(metadata['fee'].values())[0], dict):
                raise Exception('Old format for fee no longer supported. '
                                'Fee must be specified as {"currency":,"address":,"amount":}')

            if 'amount' in metadata['fee'] and 'currency' in metadata['fee']:
                if not metadata['fee']['amount']:
                    log.warning("Stripping empty fee from published metadata")
                    del metadata['fee']
                elif 'address' not in metadata['fee']:
                    address = await account.receiving.get_or_create_usable_address()
                    metadata['fee']['address'] = address
        if 'fee' in metadata and 'version' not in metadata['fee']:
            metadata['fee']['version'] = '_0_0_1'

        claim_dict = {
            'version': '_0_0_1',
            'claimType': 'streamType',
            'stream': {
                'metadata': metadata,
                'version': '_0_0_1'
            }
        }

        # this will be used to verify the format with lbrynet.schema
        claim_copy = deepcopy(claim_dict)
        if sources is not None:
            claim_dict['stream']['source'] = sources
            claim_copy['stream']['source'] = sources
        elif file_path is not None:
            if not os.path.isfile(file_path):
                raise Exception("invalid file path to publish")
            # since the file hasn't yet been made into a stream, we don't have
            # a valid Source for the claim when validating the format, so we'll use a fake one
            claim_copy['stream']['source'] = {
                'version': '_0_0_1',
                'sourceType': 'lbry_sd_hash',
                'source': '0' * 96,
                'contentType': ''
            }
        else:
            # there is no existing source to use, and a file was not provided to make a new one
            raise Exception("no source provided to publish")
        try:
            ClaimDict.load_dict(claim_copy)
            # the metadata to use in the claim can be serialized by lbrynet.schema
        except DecodeError as err:
            # there was a problem with a metadata field, raise an error here rather than
            # waiting to find out when we go to publish the claim (after having made the stream)
            raise Exception(f"invalid publish metadata: {err}")

        certificate = None
        if channel_id or channel_name:
            certificate = await self.get_channel_or_error(
                self.get_accounts_or_all(channel_account_id), channel_id, channel_name
            )

        log.info("Publish: %s", {
            'name': name,
            'file_path': file_path,
            'bid': dewies_to_lbc(amount),
            'claim_address': claim_address,
            'change_address': change_address,
            'claim_dict': claim_dict,
            'channel_id': channel_id,
            'channel_name': channel_name
        })

        return await self._publish_stream(
            account, name, amount, claim_dict, file_path,
            certificate, claim_address, change_address
        )

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_claim_abandon(self, claim_id=None, txid=None, nout=None, account_id=None, blocking=True):
        """
        Abandon a name and reclaim credits from the claim

        Usage:
            claim_abandon [<claim_id> | --claim_id=<claim_id>]
                          [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                          [--account_id=<account_id>]
                          [--blocking]

        Options:
            --claim_id=<claim_id>      : (str) claim_id of the claim to abandon
            --txid=<txid>              : (str) txid of the claim to abandon
            --nout=<nout>              : (int) nout of the claim to abandon
            --account_id=<account_id>  : (str) id of the account to use
            --blocking                 : (bool) wait until abandon is in mempool

        Returns:
            (dict) Dictionary containing result of the claim
            {
                success: (bool) True if txn is successful
                txid : (str) txid of resulting transaction
            }
        """
        account = self.get_account_or_default(account_id)

        if claim_id is None and txid is None and nout is None:
            raise Exception('Must specify claim_id, or txid and nout')
        if txid is None and nout is not None:
            raise Exception('Must specify txid')
        if nout is None and txid is not None:
            raise Exception('Must specify nout')

        tx = await self.wallet_manager.abandon_claim(claim_id, txid, nout, account)
        await self.analytics_manager.send_claim_action('abandon')
        if blocking:
            await self.ledger.wait(tx)
        return {"success": True, "tx": tx}

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_claim_new_support(self, name, claim_id, amount, account_id=None):
        """
        Support a name claim

        Usage:
            claim_new_support (<name> | --name=<name>) (<claim_id> | --claim_id=<claim_id>)
                              (<amount> | --amount=<amount>) [--account_id=<account_id>]

        Options:
            --name=<name>              : (str) name of the claim to support
            --claim_id=<claim_id>      : (str) claim_id of the claim to support
            --amount=<amount>          : (decimal) amount of support
            --account_id=<account_id>  : (str) id of the account to use

        Returns:
            (dict) Dictionary containing the transaction information
            {
                "hex": (str) raw transaction,
                "inputs": (list) inputs(dict) used for the transaction,
                "outputs": (list) outputs(dict) for the transaction,
                "total_fee": (int) fee in dewies,
                "total_input": (int) total of inputs in dewies,
                "total_output": (int) total of outputs in dewies(input - fees),
                "txid": (str) txid of the transaction,
            }
        """
        account = self.get_account_or_default(account_id)
        amount = self.get_dewies_or_error("amount", amount)
        result = await self.wallet_manager.support_claim(name, claim_id, amount, account)
        await self.analytics_manager.send_claim_action('new_support')
        return result

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_claim_tip(self, claim_id, amount, account_id=None):
        """
        Tip the owner of the claim

        Usage:
            claim_tip (<claim_id> | --claim_id=<claim_id>) (<amount> | --amount=<amount>)
                      [--account_id=<account_id>]

        Options:
            --claim_id=<claim_id>      : (str) claim_id of the claim to support
            --amount=<amount>          : (decimal) amount of support
            --account_id=<account_id>  : (str) id of the account to use

        Returns:
            (dict) Dictionary containing the transaction information
            {
                "hex": (str) raw transaction,
                "inputs": (list) inputs(dict) used for the transaction,
                "outputs": (list) outputs(dict) for the transaction,
                "total_fee": (int) fee in dewies,
                "total_input": (int) total of inputs in dewies,
                "total_output": (int) total of outputs in dewies(input - fees),
                "txid": (str) txid of the transaction,
            }
        """
        account = self.get_account_or_default(account_id)
        amount = self.get_dewies_or_error("amount", amount)
        validate_claim_id(claim_id)
        result = await self.wallet_manager.tip_claim(amount, claim_id, account)
        await self.analytics_manager.send_claim_action('new_support')
        return result
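
    # Note the distinction from claim_new_support above: a tip transfers the
    # amount to the claim owner's address outright, while a support locks your
    # own funds on the claim and returns them when the support is abandoned.
    # Hedged sketch (the claim id is a placeholder):
    #
    #   await daemon.jsonrpc_claim_tip(claim_id='<claim id>', amount='0.1')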

    @deprecated()
    def jsonrpc_claim_renew(self, outpoint=None, height=None):
        pass

    @requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
    def jsonrpc_claim_send_to_address(self, claim_id, address, amount=None):
        """
        Send a name claim to an address

        Usage:
            claim_send_to_address (<claim_id> | --claim_id=<claim_id>)
                                  (<address> | --address=<address>)
                                  [<amount> | --amount=<amount>]

        Options:
            --claim_id=<claim_id>  : (str) claim_id to send
            --address=<address>    : (str) address to send the claim to
            --amount=<amount>      : (int) Amount of credits to claim name for,
                                     defaults to the current amount on the claim

        Returns:
            (dict) Dictionary containing result of the claim
            {
                'tx' : (str) hex encoded transaction
                'txid' : (str) txid of resulting claim
                'nout' : (int) nout of the resulting claim
                'fee' : (float) fee paid for the claim transaction
                'claim_id' : (str) claim ID of the resulting claim
            }

        """
        decode_address(address)
        return self.wallet_manager.send_claim_to_address(
            claim_id, address, self.get_dewies_or_error("amount", amount) if amount else None
        )

    @requires(WALLET_COMPONENT)
    def jsonrpc_claim_list_mine(self, account_id=None, page=None, page_size=None):
        """
        List my name claims

        Usage:
            claim_list_mine [<account_id> | --account_id=<account_id>]
                            [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id>  : (str) id of the account to query
            --page=<page>              : (int) page to return during paginating
            --page_size=<page_size>    : (int) number of items on page during pagination

        Returns:
            (list) List of name claims owned by user
            [
                {
                    'address': (str) address that owns the claim
                    'amount': (float) amount assigned to the claim
                    'blocks_to_expiration': (int) number of blocks until it expires
                    'category': (str) "claim", "update", or "support"
                    'claim_id': (str) claim ID of the claim
                    'confirmations': (int) number of blocks of confirmations for the claim
                    'expiration_height': (int) the block height at which the claim will expire
                    'expired': (bool) true if expired, false otherwise
                    'height': (int) height of the block containing the claim
                    'is_spent': (bool) true if claim is abandoned, false otherwise
                    'name': (str) name of the claim
                    'permanent_url': (str) permanent url of the claim,
                    'txid': (str) txid of the claim
                    'nout': (int) nout of the claim
                    'value': (str) value of the claim
                },
            ]
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_claims,
            account.get_claim_count,
            page, page_size
        )

    @requires(WALLET_COMPONENT)
    async def jsonrpc_claim_list(self, name):
        """
        List current claims and information about them for a given name

        Usage:
            claim_list (<name> | --name=<name>)

        Options:
            --name=<name>  : (str) name of the claim to list info about

        Returns:
            (dict) State of claims assigned for the name
            {
                'claims': (list) list of claims for the name
                [
                    {
                        'amount': (float) amount assigned to the claim
                        'effective_amount': (float) total amount assigned to the claim,
                                            including supports
                        'claim_id': (str) claim ID of the claim
                        'height': (int) height of block containing the claim
                        'txid': (str) txid of the claim
                        'nout': (int) nout of the claim
                        'permanent_url': (str) permanent url of the claim,
                        'supports': (list) a list of supports attached to the claim
                        'value': (str) the value of the claim
                    },
                ]
                'supports_without_claims': (list) supports without any claims attached to them
                'last_takeover_height': (int) the height of last takeover for the name
            }
        """
        claims = await self.wallet_manager.get_claims_for_name(name)  # type: dict
        sort_claim_results(claims['claims'])
        return claims
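
    # sort_claim_results (assumed to be a helper defined elsewhere in this
    # module) orders the resolved claims deterministically, e.g. by height and
    # outpoint, so repeated calls return the list in a stable order.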

    @requires(WALLET_COMPONENT)
    async def jsonrpc_claim_list_by_channel(self, page=0, page_size=10, uri=None, uris=None):
        """
        Get paginated claims in a channel specified by a channel uri

        Usage:
            claim_list_by_channel (<uri> | --uri=<uri>) [<uris>...] [--page=<page>]
                                  [--page_size=<page_size>]

        Options:
            --uri=<uri>              : (str) uri of the channel
            --uris=<uris>            : (list) uris of the channel
            --page=<page>            : (int) which page of results to return where page 1 is the first
                                       page, defaults to no pages
            --page_size=<page_size>  : (int) number of results in a page, default of 10

        Returns:
            {
                resolved channel uri: {
                    If there was an error:
                    'error': (str) error message

                    'claims_in_channel': the total number of results for the channel,

                    If a page of results was requested:
                    'returned_page': page number returned,
                    'claims_in_channel': [
                        {
                            'absolute_channel_position': (int) claim index number in sorted list of
                                                         claims which assert to be part of the
                                                         channel
                            'address': (str) claim address,
                            'amount': (float) claim amount,
                            'effective_amount': (float) claim amount including supports,
                            'claim_id': (str) claim id,
                            'claim_sequence': (int) claim sequence number,
                            'decoded_claim': (bool) whether or not the claim value was decoded,
                            'height': (int) claim height,
                            'depth': (int) claim depth,
                            'has_signature': (bool) included if decoded_claim
                            'name': (str) claim name,
                            'supports': (list) list of supports [{'txid': (str) txid,
                                                                  'nout': (int) nout,
                                                                  'amount': (float) amount}],
                            'txid': (str) claim txid,
                            'nout': (str) claim nout,
                            'signature_is_valid': (bool), included if has_signature,
                            'value': ClaimDict if decoded, otherwise hex string
                        }
                    ],
                }
            }
        """
        # avoid a mutable default argument; treat a missing uris list as empty
        uris = tuple(uris or ())
        page = int(page)
        page_size = int(page_size)
        if uri is not None:
            uris += (uri,)

        results = {}

        valid_uris = tuple()
        for chan_uri in uris:
            try:
                parsed = parse_lbry_uri(chan_uri)
                if not parsed.contains_channel:
                    results[chan_uri] = {"error": "%s is not a channel uri" % parsed.name}
                elif parsed.path:
                    results[chan_uri] = {"error": "%s is a claim in a channel" % parsed.path}
                else:
                    valid_uris += (chan_uri,)
            except URIParseError:
                results[chan_uri] = {"error": "%s is not a valid uri" % chan_uri}

        resolved = await self.wallet_manager.resolve(*valid_uris, page=page, page_size=page_size)
        if 'error' in resolved:
            return {'error': resolved['error']}
        for u in resolved:
            if 'error' in resolved[u]:
                results[u] = resolved[u]
            else:
                results[u] = {
                    'claims_in_channel': resolved[u]['claims_in_channel']
                }
                if page:
                    results[u]['returned_page'] = page
                    results[u]['claims_in_channel'] = resolved[u].get('claims_in_channel', [])
        return results

    @requires(WALLET_COMPONENT)
    def jsonrpc_transaction_list(self, account_id=None, page=None, page_size=None):
        """
        List transactions belonging to wallet

        Usage:
            transaction_list [<account_id> | --account_id=<account_id>]
                             [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id>  : (str) id of the account to query
            --page=<page>              : (int) page to return during paginating
            --page_size=<page_size>    : (int) number of items on page during pagination

        Returns:
            (list) List of transactions

            {
                "claim_info": (list) claim info if in txn [{
                    "address": (str) address of claim,
                    "balance_delta": (float) bid amount,
                    "amount": (float) claim amount,
                    "claim_id": (str) claim id,
                    "claim_name": (str) claim name,
                    "nout": (int) nout
                }],
                "abandon_info": (list) abandon info if in txn [{
                    "address": (str) address of abandoned claim,
                    "balance_delta": (float) returned amount,
                    "amount": (float) claim amount,
                    "claim_id": (str) claim id,
                    "claim_name": (str) claim name,
                    "nout": (int) nout
                }],
                "confirmations": (int) number of confirmations for the txn,
                "date": (str) date and time of txn,
                "fee": (float) txn fee,
                "support_info": (list) support info if in txn [{
                    "address": (str) address of support,
                    "balance_delta": (float) support amount,
                    "amount": (float) support amount,
                    "claim_id": (str) claim id,
                    "claim_name": (str) claim name,
                    "is_tip": (bool),
                    "nout": (int) nout
                }],
                "timestamp": (int) timestamp,
                "txid": (str) txn id,
                "update_info": (list) update info if in txn [{
                    "address": (str) address of claim,
                    "balance_delta": (float) credited/debited,
                    "amount": (float) absolute amount,
                    "claim_id": (str) claim id,
                    "claim_name": (str) claim name,
                    "nout": (int) nout
                }],
                "value": (float) value of txn
            }

        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            self.wallet_manager.get_history,
            self.ledger.db.get_transaction_count,
            page, page_size, account=account
        )

    @requires(WALLET_COMPONENT)
    def jsonrpc_transaction_show(self, txid):
        """
        Get a decoded transaction from a txid

        Usage:
            transaction_show (<txid> | --txid=<txid>)

        Options:
            --txid=<txid>  : (str) txid of the transaction

        Returns:
            (dict) JSON formatted transaction
        """
        return self.wallet_manager.get_transaction(txid)

    @requires(WALLET_COMPONENT)
    def jsonrpc_utxo_list(self, account_id=None, page=None, page_size=None):
        """
        List unspent transaction outputs

        Usage:
            utxo_list [<account_id> | --account_id=<account_id>]
                      [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id>  : (str) id of the account to query
            --page=<page>              : (int) page to return during paginating
            --page_size=<page_size>    : (int) number of items on page during pagination

        Returns:
            (list) List of unspent transaction outputs (UTXOs)
            [
                {
                    "address": (str) the output address
                    "amount": (float) unspent amount
                    "height": (int) block height
                    "is_claim": (bool) is the tx a claim
                    "is_coinbase": (bool) is the tx a coinbase tx
                    "is_support": (bool) is the tx a support
                    "is_update": (bool) is the tx an update
                    "nout": (int) nout of the output
                    "txid": (str) txid of the output
                },
                ...
            ]
        """
        account = self.get_account_or_default(account_id)
        return maybe_paginate(
            account.get_utxos,
            account.get_utxo_count,
            page, page_size
        )

    @requires(WALLET_COMPONENT)
    def jsonrpc_utxo_release(self, account_id=None):
        """
        When spending a UTXO it is locally locked to prevent double spends;
        occasionally this can result in a UTXO being locked which ultimately
        did not get spent (failed to broadcast, spend transaction was not
        accepted by blockchain node, etc). This command releases the lock
        on all UTXOs in your account.

        Usage:
            utxo_release [<account_id> | --account_id=<account_id>]

        Options:
            --account_id=<account_id>  : (str) id of the account to query

        Returns:
            None
        """
        return self.get_account_or_default(account_id).release_all_outputs()

    @requires(WALLET_COMPONENT)
    def jsonrpc_block_show(self, blockhash=None, height=None):
        """
        Get contents of a block

        Usage:
            block_show (<blockhash> | --blockhash=<blockhash>) | (<height> | --height=<height>)

        Options:
            --blockhash=<blockhash>  : (str) hash of the block to look up
            --height=<height>        : (int) height of the block to look up

        Returns:
            (dict) Requested block
        """
        return self.wallet_manager.get_block(blockhash, height)

    @requires(WALLET_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT, RATE_LIMITER_COMPONENT, PAYMENT_RATE_COMPONENT,
              conditions=[WALLET_IS_UNLOCKED])
    async def jsonrpc_blob_get(self, blob_hash, timeout=None, encoding=None, payment_rate_manager=None):
        """
        Download and return a blob

        Usage:
            blob_get (<blob_hash> | --blob_hash=<blob_hash>) [--timeout=<timeout>]
                     [--encoding=<encoding>] [--payment_rate_manager=<payment_rate_manager>]

        Options:
            --blob_hash=<blob_hash>                        : (str) blob hash of the blob to get
            --timeout=<timeout>                            : (int) timeout in number of seconds
            --encoding=<encoding>                          : (str) by default no attempt at decoding
                                                             is made, can be set to one of the
                                                             following decoders:
                                                                 'json'
            --payment_rate_manager=<payment_rate_manager>  : (str) if not given the default payment rate
                                                             manager will be used.
                                                             supported alternative rate managers:
                                                                 'only-free'

        Returns:
            (str) Success/Fail message or (dict) decoded data
        """

        decoders = {
            'json': json.loads
        }

        timeout = timeout or 30
        blob = await self._download_blob(
            blob_hash, rate_manager=self.payment_rate_manager, timeout=timeout
        )
        if encoding and encoding in decoders:
            blob_file = blob.open_for_reading()
            result = decoders[encoding](blob_file.read())
            blob_file.close()
        else:
            result = "Downloaded blob %s" % blob_hash

        return result
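
    # Hedged usage sketch: fetching a blob and decoding it as JSON in a single
    # call over the JSON-RPC API (the hash below is a placeholder, not a real
    # blob):
    #
    #   import requests
    #   requests.post('http://localhost:5279', json={
    #       'method': 'blob_get',
    #       'params': {'blob_hash': '<96-hex-char hash>', 'encoding': 'json'}
    #   }).json()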

    @requires(BLOB_COMPONENT, DATABASE_COMPONENT)
    async def jsonrpc_blob_delete(self, blob_hash):
        """
        Delete a blob

        Usage:
            blob_delete (<blob_hash> | --blob_hash=<blob_hash>)

        Options:
            --blob_hash=<blob_hash>  : (str) blob hash of the blob to delete

        Returns:
            (str) Success/fail message
        """

        if blob_hash not in self.blob_manager.blobs:
            return "Don't have that blob"
        try:
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(blob_hash)
            await self.storage.delete_stream(stream_hash)
        except Exception:
            # the hash is not the sd blob of a stream, so there is no stream to delete
            pass
        await d2f(self.blob_manager.delete_blobs([blob_hash]))
        return "Deleted %s" % blob_hash

    @requires(DHT_COMPONENT)
    async def jsonrpc_peer_list(self, blob_hash, timeout=None):
        """
        Get peers for blob hash

        Usage:
            peer_list (<blob_hash> | --blob_hash=<blob_hash>) [<timeout> | --timeout=<timeout>]

        Options:
            --blob_hash=<blob_hash>  : (str) find available peers for this blob hash
            --timeout=<timeout>      : (int) peer search timeout in seconds

        Returns:
            (list) List of contact dictionaries {'host': <peer ip>, 'port': <peer port>, 'node_id': <peer node id>}
        """

        if not is_valid_blobhash(blob_hash):
            raise Exception("invalid blob hash")

        finished_deferred = self.dht_node.iterativeFindValue(unhexlify(blob_hash))

        def trap_timeout(err):
            err.trap(defer.TimeoutError)
            return []

        finished_deferred.addTimeout(timeout or conf.settings['peer_search_timeout'], self.dht_node.clock)
        finished_deferred.addErrback(trap_timeout)
        peers = await d2f(finished_deferred)
        results = [
            {
                "node_id": hexlify(node_id).decode(),
                "host": host,
                "port": port
            }
            for node_id, host, port in peers
        ]
        return results

    @requires(DATABASE_COMPONENT)
    async def jsonrpc_blob_announce(self, blob_hash=None, stream_hash=None, sd_hash=None):
        """
        Announce blobs to the DHT

        Usage:
            blob_announce (<blob_hash> | --blob_hash=<blob_hash>
                          | --stream_hash=<stream_hash> | --sd_hash=<sd_hash>)

        Options:
            --blob_hash=<blob_hash>      : (str) announce a blob, specified by blob_hash
            --stream_hash=<stream_hash>  : (str) announce all blobs associated with
                                           stream_hash
            --sd_hash=<sd_hash>          : (str) announce all blobs associated with
                                           sd_hash and the sd_hash itself

        Returns:
            (bool) true if successful
        """
        blob_hashes = []
        if blob_hash:
            blob_hashes.append(blob_hash)
        elif stream_hash or sd_hash:
            if sd_hash and stream_hash:
                raise Exception("either the sd hash or the stream hash should be provided, not both")
            if sd_hash:
                stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
            blobs = await self.storage.get_blobs_for_stream(stream_hash, only_completed=True)
            blob_hashes.extend(blob.blob_hash for blob in blobs if blob.blob_hash is not None)
        else:
            raise Exception('single argument must be specified')
        await self.storage.should_single_announce_blobs(blob_hashes, immediate=True)
        return True
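
    # The three filters above are mutually exclusive ways of naming the same
    # thing: a single blob, all completed blobs of a stream, or all completed
    # blobs of a stream plus its sd blob. Hedged sketch of the sd_hash form
    # (the hash is a placeholder):
    #
    #   await daemon.jsonrpc_blob_announce(sd_hash='<sd hash>')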

    @requires(FILE_MANAGER_COMPONENT)
    async def jsonrpc_file_reflect(self, **kwargs):
        """
        Reflect all the blobs in a file matching the filter criteria

        Usage:
            file_reflect [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                         [--stream_hash=<stream_hash>] [--rowid=<rowid>]
                         [--reflector=<reflector>]

        Options:
            --sd_hash=<sd_hash>          : (str) get file with matching sd hash
            --file_name=<file_name>      : (str) get file with matching file name in the
                                           downloads folder
            --stream_hash=<stream_hash>  : (str) get file with matching stream hash
            --rowid=<rowid>              : (int) get file with matching row id
            --reflector=<reflector>      : (str) reflector server, ip address or url
                                           by default choose a server from the config

        Returns:
            (list) list of blobs reflected
        """
        lbry_files = await self._get_lbry_files(**kwargs)
        if len(lbry_files) > 1:
            raise Exception('Too many (%i) files found, need one' % len(lbry_files))
        elif not lbry_files:
            raise Exception('No file found')
        return await d2f(reupload.reflect_file(
            lbry_files[0], reflector_server=kwargs.get('reflector', None)
        ))

    @requires(BLOB_COMPONENT, WALLET_COMPONENT)
    async def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None,
                                finished=None, page_size=None, page=None):
        """
        Returns blob hashes. If not given filters, returns all blobs known by the blob manager

        Usage:
            blob_list [--needed] [--finished] [<uri> | --uri=<uri>]
                      [<stream_hash> | --stream_hash=<stream_hash>]
                      [<sd_hash> | --sd_hash=<sd_hash>]
                      [<page_size> | --page_size=<page_size>]
                      [<page> | --page=<page>]

        Options:
            --needed                     : (bool) only return needed blobs
            --finished                   : (bool) only return finished blobs
            --uri=<uri>                  : (str) filter blobs by stream in a uri
            --stream_hash=<stream_hash>  : (str) filter blobs by stream hash
            --sd_hash=<sd_hash>          : (str) filter blobs by sd hash
            --page_size=<page_size>      : (int) results page size
            --page=<page>                : (int) page of results to return

        Returns:
            (list) List of blob hashes
        """
        if uri or stream_hash or sd_hash:
            if uri:
                metadata = (await self.wallet_manager.resolve(uri))[uri]
                sd_hash = utils.get_sd_hash(metadata)
                stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
            elif stream_hash:
                sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
            elif sd_hash:
                stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
                sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
            if stream_hash:
                crypt_blobs = await self.storage.get_blobs_for_stream(stream_hash)
                blobs = await d2f(defer.gatherResults([
                    self.blob_manager.get_blob(crypt_blob.blob_hash, crypt_blob.length)
                    for crypt_blob in crypt_blobs if crypt_blob.blob_hash is not None
                ]))
            else:
                blobs = []
            # get_blobs_for_stream does not include the sd blob, so we'll add it manually
            if sd_hash in self.blob_manager.blobs:
                blobs = [self.blob_manager.blobs[sd_hash]] + blobs
        else:
            blobs = self.blob_manager.blobs.values()

        if needed:
            blobs = [blob for blob in blobs if not blob.get_is_verified()]
        if finished:
            blobs = [blob for blob in blobs if blob.get_is_verified()]

        blob_hashes = [blob.blob_hash for blob in blobs if blob.blob_hash]
        page_size = page_size or len(blob_hashes)
        page = page or 0
        start_index = page * page_size
        stop_index = start_index + page_size
        return blob_hashes[start_index:stop_index]

    @requires(BLOB_COMPONENT)
    async def jsonrpc_blob_reflect(self, blob_hashes, reflector_server=None):
        """
        Reflects specified blobs

        Usage:
            blob_reflect (<blob_hashes>...) [--reflector_server=<reflector_server>]

        Options:
            --reflector_server=<reflector_server>  : (str) reflector address

        Returns:
            (list) reflected blob hashes
        """
        result = await d2f(reupload.reflect_blob_hashes(blob_hashes, self.blob_manager, reflector_server))
        return result

    @requires(BLOB_COMPONENT)
    async def jsonrpc_blob_reflect_all(self):
        """
        Reflects all saved blobs

        Usage:
            blob_reflect_all

        Options:
            None

        Returns:
            (bool) true if successful
        """
        blob_hashes = await d2f(self.blob_manager.get_all_verified_blobs())
        return await d2f(reupload.reflect_blob_hashes(blob_hashes, self.blob_manager))
|
2016-08-19 08:41:23 +02:00
|
|
|
|
2018-07-25 00:35:18 +02:00
|
|
|
@requires(DHT_COMPONENT)
|
2018-12-15 21:31:02 +01:00
|
|
|
async def jsonrpc_peer_ping(self, node_id, address=None, port=None):
|
2018-03-29 16:46:29 +02:00
|
|
|
"""
|
2018-07-11 21:16:01 +02:00
|
|
|
Send a kademlia ping to the specified peer. If address and port are provided the peer is pinged directly;
|
|
|
|
otherwise the peer is located in the DHT first.
|
2018-03-29 16:46:29 +02:00
|
|
|
|
|
|
|
Usage:
|
2018-07-11 21:16:01 +02:00
|
|
|
peer_ping (<node_id> | --node_id=<node_id>) [<address> | --address=<address>] [<port> | --port=<port>]
|
2018-03-29 16:46:29 +02:00
|
|
|
|
2018-04-12 19:08:58 +02:00
|
|
|
Options:
|
2018-07-11 21:16:01 +02:00
|
|
|
--address=<address> : (str) ip address of the peer
|
|
|
|
--port=<port> : (int) udp port of the peer
|
|
|
|
|
2018-04-12 19:08:58 +02:00
|
|
|
|
2018-03-29 16:46:29 +02:00
|
|
|
Returns:
|
|
|
|
(str) pong, or {'error': <error message>} if an error is encountered
|
|
|
|
"""
|
|
|
|
contact = None
|
2018-07-11 21:16:01 +02:00
|
|
|
if node_id and address and port:
|
2018-08-10 18:32:54 +02:00
|
|
|
contact = self.dht_node.contact_manager.get_contact(unhexlify(node_id), address, int(port))
|
2018-07-11 21:16:01 +02:00
|
|
|
if not contact:
|
|
|
|
contact = self.dht_node.contact_manager.make_contact(
|
2018-08-10 18:32:54 +02:00
|
|
|
unhexlify(node_id), address, int(port), self.dht_node._protocol
|
2018-07-11 21:16:01 +02:00
|
|
|
)
|
|
|
|
if not contact:
|
|
|
|
try:
|
2018-12-15 21:31:02 +01:00
|
|
|
contact = await d2f(self.dht_node.findContact(unhexlify(node_id)))
|
2018-07-11 21:16:01 +02:00
|
|
|
except TimeoutError:
|
2018-09-21 15:47:06 +02:00
|
|
|
return {'error': 'timeout finding peer'}
|
2018-03-29 16:46:29 +02:00
|
|
|
if not contact:
|
2018-09-21 15:47:06 +02:00
|
|
|
return {'error': 'peer not found'}
|
2018-03-29 16:46:29 +02:00
|
|
|
try:
|
2018-12-15 21:31:02 +01:00
|
|
|
return (await d2f(contact.ping())).decode()
|
2018-03-29 16:46:29 +02:00
|
|
|
except TimeoutError:
|
2018-12-15 21:31:02 +01:00
|
|
|
return {'error': 'ping timeout'}
|
2018-03-29 16:46:29 +02:00
|
|
|
|
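# Editor's note: the method above tries three contact sources in order -- an
# already-known contact, a freshly made one (when address and port are given),
# and finally an iterative DHT `findContact` lookup. A hypothetical caller:
async def _example_is_peer_alive(daemon, node_id):
    result = await daemon.jsonrpc_peer_ping(node_id)
    return result == 'pong'  # failures come back as {'error': ...} dicts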
2018-07-25 00:35:18 +02:00
|
|
|
@requires(DHT_COMPONENT)
|
2017-10-10 21:04:48 +02:00
|
|
|
def jsonrpc_routing_table_get(self):
|
|
|
|
"""
|
|
|
|
Get DHT routing information
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
routing_table_get
|
|
|
|
|
2018-02-24 19:13:29 +01:00
|
|
|
Options:
|
|
|
|
None
|
|
|
|
|
2017-10-10 21:04:48 +02:00
|
|
|
Returns:
|
|
|
|
(dict) dictionary containing routing and contact information
|
|
|
|
{
|
|
|
|
"buckets": {
|
|
|
|
<bucket index>: [
|
|
|
|
{
|
|
|
|
"address": (str) peer address,
|
2018-05-24 00:26:43 +02:00
|
|
|
"port": (int) peer udp port
|
2017-10-10 21:04:48 +02:00
|
|
|
"node_id": (str) peer node id,
|
|
|
|
"blobs": (list) blob hashes announced by peer
|
|
|
|
}
|
2017-10-11 21:14:29 +02:00
|
|
|
]
|
|
|
|
},
|
2017-10-10 21:04:48 +02:00
|
|
|
"contacts": (list) contact node ids,
|
2017-10-11 21:14:29 +02:00
|
|
|
"blob_hashes": (list) all of the blob hashes stored by peers in the list of buckets,
|
2017-10-10 21:04:48 +02:00
|
|
|
"node_id": (str) the local dht node id
|
2017-10-11 21:14:29 +02:00
|
|
|
}
|
2017-10-10 21:04:48 +02:00
|
|
|
"""
|
|
|
|
result = {}
|
2018-08-10 18:32:54 +02:00
|
|
|
data_store = self.dht_node._dataStore
|
2017-10-10 21:04:48 +02:00
|
|
|
hosts = {}
|
|
|
|
|
2018-08-11 01:41:08 +02:00
|
|
|
for k, v in data_store.items():
|
2018-08-16 01:23:06 +02:00
|
|
|
for contact in map(itemgetter(0), v):
|
2018-08-11 01:41:08 +02:00
|
|
|
hosts.setdefault(contact, []).append(hexlify(k).decode())
|
2017-10-10 21:04:48 +02:00
|
|
|
|
2018-08-10 18:32:54 +02:00
|
|
|
contact_set = set()
|
2018-08-11 01:41:08 +02:00
|
|
|
blob_hashes = set()
|
2017-10-10 21:04:48 +02:00
|
|
|
result['buckets'] = {}
|
|
|
|
|
2018-07-24 18:22:11 +02:00
|
|
|
for i in range(len(self.dht_node._routingTable._buckets)):
|
2018-11-13 15:36:52 +01:00
|
|
|
result['buckets'][i] = []
|
2018-07-24 18:22:11 +02:00
|
|
|
for contact in self.dht_node._routingTable._buckets[i]._contacts:
|
2018-08-16 01:23:06 +02:00
|
|
|
blobs = list(hosts.pop(contact)) if contact in hosts else []
|
2018-08-11 01:41:08 +02:00
|
|
|
blob_hashes.update(blobs)
|
2017-10-10 21:04:48 +02:00
|
|
|
host = {
|
|
|
|
"address": contact.address,
|
2018-05-24 00:26:43 +02:00
|
|
|
"port": contact.port,
|
2018-08-10 18:32:54 +02:00
|
|
|
"node_id": hexlify(contact.id).decode(),
|
2017-10-10 21:04:48 +02:00
|
|
|
"blobs": blobs,
|
|
|
|
}
|
2018-11-13 15:36:52 +01:00
|
|
|
result['buckets'][i].append(host)
|
2018-08-10 18:32:54 +02:00
|
|
|
contact_set.add(hexlify(contact.id).decode())
|
2017-10-10 21:04:48 +02:00
|
|
|
|
2018-08-11 01:41:08 +02:00
|
|
|
result['contacts'] = list(contact_set)
|
|
|
|
result['blob_hashes'] = list(blob_hashes)
|
2018-08-10 18:32:54 +02:00
|
|
|
result['node_id'] = hexlify(self.dht_node.node_id).decode()
|
2018-12-15 21:31:02 +01:00
|
|
|
return result
|
2017-10-10 21:04:48 +02:00
|
|
|
|
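# Editor's sketch: consuming the dict documented above, e.g. summarizing how
# many contacts sit in each routing table bucket.
def _example_bucket_sizes(routing_table):
    return {
        bucket_index: len(contacts)
        for bucket_index, contacts in routing_table['buckets'].items()
    }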
2018-07-24 18:30:47 +02:00
|
|
|
# the single peer downloader needs wallet access
|
2018-07-25 00:35:18 +02:00
|
|
|
@requires(DHT_COMPONENT, WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
|
2017-12-21 02:46:41 +01:00
|
|
|
def jsonrpc_blob_availability(self, blob_hash, search_timeout=None, blob_timeout=None):
|
|
|
|
"""
|
|
|
|
Get blob availability
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
blob_availability (<blob_hash> | --blob_hash=<blob_hash>) [<search_timeout> | --search_timeout=<search_timeout>]
|
|
|
|
[<blob_timeout> | --blob_timeout=<blob_timeout>]
|
|
|
|
|
|
|
|
Options:
|
2018-02-24 19:13:29 +01:00
|
|
|
--blob_hash=<blob_hash> : (str) check availability for this blob hash
|
|
|
|
--search_timeout=<search_timeout> : (int) how long to search for peers for the blob
|
|
|
|
in the DHT
|
|
|
|
--blob_timeout=<blob_timeout> : (int) how long to try downloading from a peer
|
2017-12-21 02:46:41 +01:00
|
|
|
|
|
|
|
Returns:
|
|
|
|
(dict) {
|
|
|
|
"is_available": <bool, true if blob is available from a peer from peer list>
|
|
|
|
"reachable_peers": ["<ip>:<port>"],
|
|
|
|
"unreachable_peers": ["<ip>:<port>"]
|
|
|
|
}
|
|
|
|
"""
|
|
|
|
return self._blob_availability(blob_hash, search_timeout, blob_timeout)
|
|
|
|
|
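# Editor's sketch: interpreting a `blob_availability` result, following the
# response shape from the docstring; the peer threshold is illustrative only.
def _example_is_well_seeded(availability, minimum_peers=2):
    return bool(availability['is_available']) and \
        len(availability['reachable_peers']) >= minimum_peers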
2018-07-25 21:33:43 +02:00
|
|
|
@requires(UPNP_COMPONENT, WALLET_COMPONENT, DHT_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
|
2018-12-15 21:31:02 +01:00
|
|
|
async def jsonrpc_stream_availability(self, uri, search_timeout=None, blob_timeout=None):
|
2017-12-21 02:46:41 +01:00
|
|
|
"""
|
|
|
|
Get stream availability for lbry uri
|
2016-09-30 19:28:01 +02:00
|
|
|
|
2017-12-21 02:46:41 +01:00
|
|
|
Usage:
|
2018-02-24 19:13:29 +01:00
|
|
|
stream_availability (<uri> | --uri=<uri>)
|
|
|
|
[<search_timeout> | --search_timeout=<search_timeout>]
|
2017-12-21 02:46:41 +01:00
|
|
|
[<blob_timeout> | --blob_timeout=<blob_timeout>]
|
2016-09-30 19:28:01 +02:00
|
|
|
|
2017-12-21 02:46:41 +01:00
|
|
|
Options:
|
2018-02-24 19:13:29 +01:00
|
|
|
--uri=<uri> : (str) check availability for this uri
|
|
|
|
--search_timeout=<search_timeout> : (int) how long to search for peers for the blob
|
|
|
|
in the DHT
|
2018-03-26 16:13:06 +02:00
|
|
|
--blob_timeout=<blob_timeout> : (int) how long to try downloading from a peer
|
2017-04-10 19:26:47 +02:00
|
|
|
|
2017-12-21 02:46:41 +01:00
|
|
|
Returns:
|
|
|
|
(dict) {
|
|
|
|
'is_available': <bool>,
|
|
|
|
'did_decode': <bool>,
|
|
|
|
'did_resolve': <bool>,
|
|
|
|
'is_stream': <bool>,
|
|
|
|
'num_blobs_in_stream': <int>,
|
|
|
|
'sd_hash': <str>,
|
|
|
|
'sd_blob_availability': <dict> see `blob_availability`,
|
|
|
|
'head_blob_hash': <str>,
|
|
|
|
'head_blob_availability': <dict> see `blob_availability`,
|
|
|
|
'use_upnp': <bool>,
|
|
|
|
'upnp_redirect_is_set': <bool>,
|
|
|
|
'error': <None> | <str> error message
|
|
|
|
}
|
|
|
|
"""
|
|
|
|
|
|
|
|
search_timeout = search_timeout or conf.settings['peer_search_timeout']
|
|
|
|
blob_timeout = blob_timeout or conf.settings['sd_download_timeout']
|
|
|
|
|
|
|
|
response = {
|
|
|
|
'is_available': False,
|
|
|
|
'did_decode': False,
|
|
|
|
'did_resolve': False,
|
|
|
|
'is_stream': False,
|
|
|
|
'num_blobs_in_stream': None,
|
|
|
|
'sd_hash': None,
|
|
|
|
'sd_blob_availability': {},
|
|
|
|
'head_blob_hash': None,
|
|
|
|
'head_blob_availability': {},
|
|
|
|
'use_upnp': conf.settings['use_upnp'],
|
2018-08-05 19:12:39 +02:00
|
|
|
'upnp_redirect_is_set': len(self.upnp.upnp_redirects) > 0,
|
2017-12-21 02:46:41 +01:00
|
|
|
'error': None
|
|
|
|
}
|
2017-04-10 19:26:47 +02:00
|
|
|
|
2017-02-16 05:38:33 +01:00
|
|
|
try:
|
2018-12-15 21:31:02 +01:00
|
|
|
resolved_result = (await self.wallet_manager.resolve(uri))[uri]
|
2017-12-21 02:46:41 +01:00
|
|
|
response['did_resolve'] = True
|
|
|
|
except UnknownNameError:
|
|
|
|
response['error'] = "Failed to resolve name"
|
2018-12-15 21:31:02 +01:00
|
|
|
return response
|
2017-12-21 02:46:41 +01:00
|
|
|
except URIParseError:
|
|
|
|
response['error'] = "Invalid URI"
|
2018-12-15 21:31:02 +01:00
|
|
|
return response
|
2017-04-10 19:26:47 +02:00
|
|
|
|
2017-12-21 02:46:41 +01:00
|
|
|
try:
|
|
|
|
claim_obj = smart_decode(resolved_result['claim']['hex'])
|
|
|
|
response['did_decode'] = True
|
|
|
|
except DecodeError:
|
|
|
|
response['error'] = "Failed to decode claim value"
|
2018-12-15 21:31:02 +01:00
|
|
|
return response
|
2017-12-21 02:46:41 +01:00
|
|
|
|
|
|
|
response['is_stream'] = claim_obj.is_stream
|
|
|
|
if not claim_obj.is_stream:
|
|
|
|
response['error'] = "Claim for \"%s\" does not contain a stream" % uri
|
2018-12-15 21:31:02 +01:00
|
|
|
return response
|
2017-12-21 02:46:41 +01:00
|
|
|
|
|
|
|
sd_hash = claim_obj.source_hash
|
|
|
|
response['sd_hash'] = sd_hash
|
|
|
|
head_blob_hash = None
|
|
|
|
downloader = self._get_single_peer_downloader()
|
2018-07-25 21:33:43 +02:00
|
|
|
have_sd_blob = sd_hash in self.blob_manager.blobs
|
2017-12-21 02:46:41 +01:00
|
|
|
try:
|
2018-12-15 21:31:02 +01:00
|
|
|
sd_blob = await self.jsonrpc_blob_get(sd_hash, timeout=blob_timeout, encoding="json")
|
2017-12-21 02:46:41 +01:00
|
|
|
if not have_sd_blob:
|
2018-12-15 21:31:02 +01:00
|
|
|
await self.jsonrpc_blob_delete(sd_hash)
|
2017-12-21 02:46:41 +01:00
|
|
|
if sd_blob and 'blobs' in sd_blob:
|
|
|
|
response['num_blobs_in_stream'] = len(sd_blob['blobs']) - 1
|
|
|
|
head_blob_hash = sd_blob['blobs'][0]['blob_hash']
|
2018-12-15 21:31:02 +01:00
|
|
|
head_blob_availability = await self._blob_availability(
|
|
|
|
head_blob_hash, search_timeout, blob_timeout, downloader)
|
2017-12-21 02:46:41 +01:00
|
|
|
response['head_blob_availability'] = head_blob_availability
|
|
|
|
except Exception as err:
|
|
|
|
response['error'] = str(err)
|
|
|
|
response['head_blob_hash'] = head_blob_hash
|
2018-12-15 21:31:02 +01:00
|
|
|
response['sd_blob_availability'] = await self._blob_availability(
|
|
|
|
sd_hash, search_timeout, blob_timeout, downloader)
|
2017-12-21 02:46:41 +01:00
|
|
|
response['is_available'] = response['sd_blob_availability'].get('is_available') and \
|
|
|
|
response['head_blob_availability'].get('is_available')
|
2018-12-15 21:31:02 +01:00
|
|
|
return response
|
2016-09-30 19:28:01 +02:00
|
|
|
|
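# Editor's note: per the final assignment above, a stream is "available" only
# when both its sd blob and head blob can be fetched. A hypothetical helper
# for surfacing which half failed:
def _example_explain_unavailability(response):
    if response['error']:
        return response['error']
    if not response['sd_blob_availability'].get('is_available'):
        return 'sd blob unavailable'
    if not response['head_blob_availability'].get('is_available'):
        return 'head blob unavailable'
    return 'available'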
2018-10-18 01:07:17 +02:00
|
|
|
async def get_channel_or_error(
|
|
|
|
self, accounts: List[LBCAccount], channel_id: Optional[str] = None, channel_name: Optional[str] = None):
|
2018-09-19 04:23:41 +02:00
|
|
|
if channel_id is not None:
|
2018-10-16 21:04:20 +02:00
|
|
|
certificates = await self.wallet_manager.get_certificates(
|
2018-10-18 01:07:17 +02:00
|
|
|
private_key_accounts=accounts, claim_id=channel_id)
|
2018-09-19 04:23:41 +02:00
|
|
|
if not certificates:
|
|
|
|
raise ValueError("Couldn't find channel with claim_id '{}'." .format(channel_id))
|
|
|
|
return certificates[0]
|
2018-10-03 18:00:21 +02:00
|
|
|
if channel_name is not None:
|
2018-10-16 21:04:20 +02:00
|
|
|
certificates = await self.wallet_manager.get_certificates(
|
2018-10-18 01:07:17 +02:00
|
|
|
private_key_accounts=accounts, claim_name=channel_name)
|
2018-09-19 04:23:41 +02:00
|
|
|
if not certificates:
|
2018-10-18 12:42:45 +02:00
|
|
|
raise ValueError(f"Couldn't find channel with name '{channel_name}'.")
|
2018-09-19 04:23:41 +02:00
|
|
|
return certificates[0]
|
|
|
|
raise ValueError("Couldn't find channel because a channel name or channel_id was not provided.")
|
|
|
|
|
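# Editor's usage note (illustrative, not from this file): callers resolve a
# channel by exactly one identifier and may rely on a non-None result, since
# every failure path raises ValueError:
#
#     channel = await self.get_channel_or_error(
#         self.get_accounts_or_all(None), channel_name='@my-channel')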
2018-09-21 15:53:18 +02:00
|
|
|
def get_account_or_default(self, account_id: str, argument_name: str = "account", lbc_only=True):
|
2018-09-19 15:58:50 +02:00
|
|
|
if account_id is None:
|
|
|
|
return self.default_account
|
2018-09-21 15:47:06 +02:00
|
|
|
return self.get_account_or_error(account_id, argument_name, lbc_only)
|
2018-09-19 15:58:50 +02:00
|
|
|
|
2018-10-18 01:07:17 +02:00
|
|
|
def get_accounts_or_all(self, account_ids: List[str]):
|
|
|
|
return [
|
|
|
|
self.get_account_or_error(account_id)
|
|
|
|
for account_id in account_ids
|
|
|
|
] if account_ids else self.default_wallet.accounts
|
|
|
|
|
2018-09-21 15:53:18 +02:00
|
|
|
def get_account_or_error(self, account_id: str, argument_name: str = "account", lbc_only=True):
|
2018-08-26 06:44:23 +02:00
|
|
|
for account in self.default_wallet.accounts:
|
2018-08-30 06:04:25 +02:00
|
|
|
if account.id == account_id:
|
2018-08-06 06:28:11 +02:00
|
|
|
if lbc_only and not isinstance(account, LBCAccount):
|
|
|
|
raise ValueError(
|
|
|
|
"Found '{}', but it's an {} ledger account. "
|
|
|
|
"'{}' requires specifying an LBC ledger account."
|
2018-09-21 15:47:06 +02:00
|
|
|
.format(account_id, account.ledger.symbol, argument_name)
|
2018-08-06 06:28:11 +02:00
|
|
|
)
|
|
|
|
return account
|
2018-10-18 12:42:45 +02:00
|
|
|
raise ValueError(f"Couldn't find account: {account_id}.")
|
2018-08-06 08:53:27 +02:00
|
|
|
|
2018-08-06 06:28:11 +02:00
|
|
|
@staticmethod
|
2018-10-03 22:38:47 +02:00
|
|
|
def get_dewies_or_error(argument: str, lbc: str):
|
|
|
|
try:
|
|
|
|
return lbc_to_dewies(lbc)
|
|
|
|
except ValueError as e:
|
|
|
|
raise ValueError("Invalid value for '{}': {}".format(argument, e.args[0]))
|
2018-07-26 05:29:13 +02:00
|
|
|
|
2017-06-01 18:10:19 +02:00
|
|
|
|
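# Editor's note: judging by the `lbc_to_dewies`/`dewies_to_lbc` naming, dewies
# appear to be the smallest LBC denomination (10**8 per LBC, like satoshis) --
# an assumption, not stated in this file. Illustration:
#
#     get_dewies_or_error("amount", "1.5")   # -> 150000000, assuming 10**8
#     get_dewies_or_error("amount", "oops")  # ValueError naming 'amount'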
2017-01-02 20:52:24 +01:00
|
|
|
def loggly_time_string(dt):
|
|
|
|
formatted_dt = dt.strftime("%Y-%m-%dT%H:%M:%S")
|
2017-01-03 20:13:01 +01:00
|
|
|
fractional_seconds = str(round(dt.microsecond * (10.0 ** -6), 3))[1:]  # e.g. 500000 microseconds -> ".5"
|
2018-09-21 22:28:24 +02:00
|
|
|
return urllib.parse.quote(formatted_dt + fractional_seconds + "Z")
|
2017-01-02 20:52:24 +01:00
|
|
|
|
|
|
|
|
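# Editor's worked example for the helper above (illustrative values):
#
#     >>> from datetime import datetime
#     >>> loggly_time_string(datetime(2017, 1, 2, 20, 52, 24, 500000))
#     '2017-01-02T20%3A52%3A24.5Z'
#
# urllib.parse.quote percent-encodes the colons so the timestamp can be
# embedded in the Loggly search URL built below.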
2017-02-02 16:23:17 +01:00
|
|
|
def get_loggly_query_string(installation_id):
|
2017-01-02 22:09:28 +01:00
|
|
|
base_loggly_search_url = "https://lbry.loggly.com/search#"
|
2017-01-02 20:52:24 +01:00
|
|
|
now = utils.now()
|
|
|
|
yesterday = now - utils.timedelta(days=1)
|
2017-01-02 22:09:28 +01:00
|
|
|
params = {
|
2017-02-02 16:23:17 +01:00
|
|
|
'terms': 'json.installation_id:{}*'.format(installation_id[:SHORT_ID_LEN]),
|
2017-01-02 22:09:28 +01:00
|
|
|
'from': loggly_time_string(yesterday),
|
|
|
|
'to': loggly_time_string(now)
|
|
|
|
}
|
2018-09-21 22:28:24 +02:00
|
|
|
data = urllib.parse.urlencode(params)
|
2017-01-02 22:09:28 +01:00
|
|
|
return base_loggly_search_url + data
|
2017-01-02 20:52:24 +01:00
|
|
|
|
|
|
|
|
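# Editor's note: the resulting URL points a browser at Loggly's search UI,
# filtered to this installation's shortened id over the last 24 hours, roughly
# (encoding applied by urlencode, values illustrative):
#
#     https://lbry.loggly.com/search#terms=json.installation_id%3A<short_id>*&from=<yesterday>&to=<now>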
2017-02-02 16:23:17 +01:00
|
|
|
def report_bug_to_slack(message, installation_id, platform_name, app_version):
|
2017-02-09 04:01:56 +01:00
|
|
|
webhook = utils.deobfuscate(conf.settings['SLACK_WEBHOOK'])
|
2017-01-02 20:52:24 +01:00
|
|
|
payload_template = "os: %s\n version: %s\n<%s|loggly>\n%s"
|
|
|
|
payload_params = (
|
|
|
|
platform_name,
|
|
|
|
app_version,
|
2017-02-02 16:23:17 +01:00
|
|
|
get_loggly_query_string(installation_id),
|
2017-01-02 20:52:24 +01:00
|
|
|
message
|
|
|
|
)
|
|
|
|
payload = {
|
|
|
|
"text": payload_template % payload_params
|
|
|
|
}
|
|
|
|
requests.post(webhook, json.dumps(payload))
|
|
|
|
|
|
|
|
|
2017-02-02 05:40:03 +01:00
|
|
|
def get_lbry_file_search_value(search_fields):
|
2017-03-06 23:01:35 +01:00
|
|
|
for searchtype in FileID:
|
|
|
|
value = search_fields.get(searchtype, None)
|
|
|
|
if value is not None:
|
2016-10-20 21:52:37 +02:00
|
|
|
return searchtype, value
|
2018-10-18 12:42:45 +02:00
|
|
|
raise NoValidSearch(f'{search_fields} is missing a valid search type')
|
2017-02-16 05:39:17 +01:00
|
|
|
|
|
|
|
|
2017-03-06 23:01:35 +01:00
|
|
|
def iter_lbry_file_search_values(search_fields):
|
|
|
|
for searchtype in FileID:
|
|
|
|
value = search_fields.get(searchtype, None)
|
|
|
|
if value is not None:
|
|
|
|
yield searchtype, value
|
|
|
|
|
|
|
|
|
2018-05-21 06:24:18 +02:00
|
|
|
def create_key_getter(field):
|
|
|
|
search_path = field.split('.')
|
|
|
|
def key_getter(value):
|
|
|
|
for key in search_path:
|
|
|
|
try:
|
|
|
|
value = value[key]
|
|
|
|
except KeyError as e:
|
2018-07-22 03:12:33 +02:00
|
|
|
errmsg = "Failed to get '{}', key {} was not found."
|
|
|
|
raise Exception(errmsg.format(field, str(e)))
|
2018-05-21 06:24:18 +02:00
|
|
|
return value
|
|
|
|
return key_getter
|
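# Editor's sketch: `create_key_getter` builds a dotted-path accessor that
# works as a sort key. Hypothetical data, not daemon state:
example_claims = [{'value': {'amount': 2}}, {'value': {'amount': 1}}]
example_claims.sort(key=create_key_getter('value.amount'))
assert [c['value']['amount'] for c in example_claims] == [1, 2]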