lbry-sdk/lbry/extras/daemon/daemon.py

5510 lines
265 KiB
Python
Raw Normal View History

2020-03-11 01:39:42 +01:00
import linecache
2016-02-29 19:25:47 +01:00
import os
2019-10-14 10:17:37 +02:00
import re
2019-01-22 23:44:17 +01:00
import asyncio
import logging
import json
2019-02-11 00:36:21 +01:00
import time
2019-01-22 23:44:17 +01:00
import inspect
import typing
2019-01-30 20:59:48 +01:00
import random
2020-03-11 00:50:25 +01:00
import tracemalloc
from decimal import Decimal
2019-01-22 23:44:17 +01:00
from urllib.parse import urlencode, quote
from typing import Callable, Optional, List
from binascii import hexlify, unhexlify
from traceback import format_exc
from functools import wraps, partial
import base58
from aiohttp import web
2020-04-24 03:17:44 +02:00
from prometheus_client import generate_latest as prom_generate_latest, Gauge, Histogram, Counter
2019-05-05 23:04:06 +02:00
from google.protobuf.message import DecodeError
2020-12-24 05:55:58 +01:00
2020-01-03 04:18:49 +01:00
from lbry.wallet import (
Wallet, ENCRYPT_ON_DISK, SingleKey, HierarchicalDeterministic,
2020-03-07 06:34:47 +01:00
Transaction, Output, Input, Account, database
2020-01-03 04:18:49 +01:00
)
from lbry.wallet.dewies import dewies_to_lbc, lbc_to_dewies, dict_values_to_lbc
2020-03-07 06:34:47 +01:00
from lbry.wallet.constants import TXO_TYPES, CLAIM_TYPE_NAMES
from lbry.wallet.bip32 import PrivateKey
from lbry.crypto.base58 import Base58
2018-07-05 04:16:02 +02:00
2019-06-21 02:55:47 +02:00
from lbry import utils
from lbry.conf import Config, Setting, NOT_SET
2019-06-21 02:55:47 +02:00
from lbry.blob.blob_file import is_valid_blobhash, BlobBuffer
from lbry.blob_exchange.downloader import download_blob
2019-10-01 02:00:10 +02:00
from lbry.dht.peer import make_kademlia_peer
from lbry.error import (
DownloadSDTimeoutError, ComponentsNotStartedError, ComponentStartConditionNotMetError,
CommandDoesNotExistError, BaseError, WalletNotFoundError, WalletAlreadyLoadedError, WalletAlreadyExistsError,
ConflictingInputValueError, AlreadyPurchasedError, PrivateKeyNotFoundError, InputStringIsBlankError,
InputValueError
)
2019-06-21 02:55:47 +02:00
from lbry.extras import system_info
from lbry.extras.daemon import analytics
from lbry.extras.daemon.components import WALLET_COMPONENT, DATABASE_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT
2022-04-04 04:20:02 +02:00
from lbry.extras.daemon.components import FILE_MANAGER_COMPONENT, DISK_SPACE_COMPONENT, TRACKER_ANNOUNCER_COMPONENT
from lbry.extras.daemon.components import EXCHANGE_RATE_MANAGER_COMPONENT, UPNP_COMPONENT
from lbry.extras.daemon.componentmanager import RequiredCondition
from lbry.extras.daemon.componentmanager import ComponentManager
2019-06-21 02:55:47 +02:00
from lbry.extras.daemon.json_response_encoder import JSONResponseEncoder
from lbry.extras.daemon.undecorated import undecorated
from lbry.extras.daemon.security import ensure_request_allowed
from lbry.file_analysis import VideoFileAnalyzer
2019-06-21 02:55:47 +02:00
from lbry.schema.claim import Claim
2021-08-13 23:36:08 +02:00
from lbry.schema.url import URL, normalize_name
2019-01-22 23:44:17 +01:00
if typing.TYPE_CHECKING:
2019-06-21 02:55:47 +02:00
from lbry.blob.blob_manager import BlobManager
from lbry.dht.node import Node
2021-08-16 20:15:12 +02:00
from lbry.extras.daemon.components import UPnPComponent, DiskSpaceManager
2019-06-21 02:55:47 +02:00
from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager
from lbry.extras.daemon.storage import SQLiteStorage
from lbry.wallet import WalletManager, Ledger
2020-02-07 16:32:39 +01:00
from lbry.file.file_manager import FileManager
2017-04-11 04:47:54 +02:00
log = logging.getLogger(__name__)

# claim_search fields that accept range operators ('=', '<', '>', '<=', '>=');
# consumed by fix_kwargs_for_hub() below.
RANGE_FIELDS = {
    'height', 'creation_height', 'activation_height', 'expiration_height',
    'timestamp', 'creation_timestamp', 'duration', 'release_time', 'fee_amount',
    'tx_position', 'repost_count', 'limit_claims_per_channel',
    'amount', 'effective_amount', 'support_amount',
    'trending_score', 'censor_type', 'tx_num'
}
# 'limit_claims_per_channel' gets special (non-range) handling in
# fix_kwargs_for_hub(), so it is excluded from the generic range handling.
MY_RANGE_FIELDS = RANGE_FIELDS - {"limit_claims_per_channel"}
# Legacy API argument names mapped to the names the hub expects.  Note that
# all old trending_* arguments collapse onto 'trending_score'.
REPLACEMENTS = {
    'claim_name': 'normalized_name',
    'name': 'normalized_name',
    'txid': 'tx_id',
    'nout': 'tx_nout',
    'trending_group': 'trending_score',
    'trending_mixed': 'trending_score',
    'trending_global': 'trending_score',
    'trending_local': 'trending_score',
    'reposted': 'repost_count',
    'stream_types': 'stream_type',
    'media_types': 'media_type',
    'valid_channel_signature': 'is_signature_valid'
}
2018-12-13 04:32:44 +01:00
def is_transactional_function(name):
    """Return True if the API method named *name* can move funds or modify
    claims (used to decide whether a call's arguments should be logged).

    Fix: the original returned None (not False) for non-transactional names;
    callers relied on truthiness, so returning an explicit bool is
    backward-compatible and clearer.
    """
    return any(action in name for action in ('create', 'update', 'abandon', 'send', 'fund'))
2018-12-13 04:32:44 +01:00
def requires(*components, **conditions):
    """Decorator factory for API methods that need certain components running.

    *components* are component names that must all be started before the
    wrapped method may run; the optional ``conditions=[...]`` keyword lists
    named conditions that are evaluated via the component manager first.

    Raises ComponentStartConditionNotMetError or ComponentsNotStartedError
    from the wrapper when the requirements are not met.
    """
    if conditions and list(conditions.keys()) != ["conditions"]:
        raise SyntaxError("invalid conditions argument")
    condition_names = conditions.get("conditions", [])

    def _wrap(method):
        @wraps(method)
        def _inner(*args, **kwargs):
            # args[0] is the Daemon instance.
            manager = args[0].component_manager
            for condition in condition_names:
                passed, err_msg = manager.evaluate_condition(condition)
                if not passed:
                    raise ComponentStartConditionNotMetError(err_msg)
            if not manager.all_components_running(*components):
                raise ComponentsNotStartedError(
                    f"the following required components have not yet started: {json.dumps(components)}"
                )
            return method(*args, **kwargs)

        return _inner

    return _wrap
def deprecated(new_command=None):
    """Mark a jsonrpc_* method as deprecated, optionally naming the command
    that replaces it.  The JSONRPCServerType metaclass routes methods carrying
    ``_deprecated`` into ``deprecated_methods`` instead of ``callable_methods``.
    """
    def _mark(fn):
        fn.new_command = new_command
        fn._deprecated = True
        return fn

    return _mark
2016-06-28 20:28:59 +02:00
# Stream/download status codes reported through the API.
INITIALIZING_CODE = 'initializing'
# TODO: make this consistent with the stages in Downloader.py
DOWNLOAD_METADATA_CODE = 'downloading_metadata'
DOWNLOAD_TIMEOUT_CODE = 'timeout'
DOWNLOAD_RUNNING_CODE = 'running'
DOWNLOAD_STOPPED_CODE = 'stopped'
# (status code, human-readable message template) pairs for stream progress.
STREAM_STAGES = [
    (INITIALIZING_CODE, 'Initializing'),
    (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
    (DOWNLOAD_RUNNING_CODE, 'Started %s, got %s/%s blobs, stream status: %s'),
    (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
    (DOWNLOAD_TIMEOUT_CODE, 'Stream timed out')
]

SHORT_ID_LEN = 20  # number of claim-id characters used in shortened ids
MAX_UPDATE_FEE_ESTIMATE = 0.3  # presumably LBC; fee-estimate cap for updates -- TODO confirm units
DEFAULT_PAGE_SIZE = 20  # default page size for paginated API results

# Matches a full 40-hex-character claim id.
VALID_FULL_CLAIM_ID = re.compile('[0-9a-fA-F]{40}')
2018-08-16 01:23:06 +02:00
2019-04-06 21:33:07 +02:00
def encode_pagination_doc(items):
    """Wrap an item documentation template in the standard pagination
    envelope used by the generated API docs."""
    envelope = dict(
        page="Page number of the current items.",
        page_size="Number of items to show on a page.",
        total_pages="Total number of pages.",
        total_items="Total number of items.",
    )
    envelope["items"] = [items]
    return envelope
2020-03-21 23:06:05 +01:00
async def paginate_rows(get_records: Callable, get_record_count: Optional[Callable],
                        page: Optional[int], page_size: Optional[int], **constraints):
    """Fetch one page of rows.

    Args:
        get_records: async callable returning rows for the given constraints.
        get_record_count: optional async callable returning the total row
            count; when provided, 'total_pages'/'total_items' are included.
        page: 1-based page number (None/0 defaults to 1).
        page_size: rows per page (None/0 defaults to DEFAULT_PAGE_SIZE).
        **constraints: passed through to both callables; 'offset' and 'limit'
            are injected based on the page arguments.

    Returns a dict with 'items', 'page', 'page_size' and optionally the totals.
    """
    page = max(1, page or 1)
    page_size = max(1, page_size or DEFAULT_PAGE_SIZE)
    constraints.update({
        "offset": page_size * (page - 1),
        "limit": page_size
    })
    items = await get_records(**constraints)
    result = {"items": items, "page": page, "page_size": page_size}
    if get_record_count is not None:
        total_items = await get_record_count(**constraints)
        # Integer ceiling division: the previous float-based
        # int((n + ps - 1) / ps) could round incorrectly for very large counts.
        result["total_pages"] = (total_items + page_size - 1) // page_size
        result["total_items"] = total_items
    return result
def paginate_list(items: List, page: Optional[int], page_size: Optional[int]):
    """Paginate an in-memory list.

    Args:
        items: the full list of results.
        page: 1-based page number (None/0 defaults to 1).
        page_size: items per page (None/0 defaults to DEFAULT_PAGE_SIZE).

    Returns a dict with 'items' (the requested slice), 'total_pages',
    'total_items', 'page' and 'page_size'.
    """
    page = max(1, page or 1)
    page_size = max(1, page_size or DEFAULT_PAGE_SIZE)
    total_items = len(items)
    offset = page_size * (page - 1)
    # Slicing past the end yields [] naturally, so no explicit bounds check is
    # needed (the old `if offset <= total_items` guard was redundant).
    return {
        "items": items[offset:offset + page_size],
        # Integer ceiling division instead of the old float-based version,
        # which could round incorrectly for very large totals.
        "total_pages": (total_items + page_size - 1) // page_size,
        "total_items": total_items,
        "page": page, "page_size": page_size
    }
def fix_kwargs_for_hub(**kwargs):
    """Translate legacy claim_search API arguments into the query shape the
    hub expects, mutating and returning *kwargs*.

    Transformations (order matters — renames happen before per-key handling):
      * drop None/[]/False values entirely;
      * rename legacy keys via REPLACEMENTS;
      * normalize claim names, wrap scalar fields in {"value": ...} dicts,
        wrap id filters in {"invert": ..., "value": [...]} dicts;
      * expand range fields into [{"op": ..., "value": [...]}] constraints;
      * migrate old trending_* order_by values onto 'trending_score'.
    """
    repeated_fields = {"media_type", "stream_type", "claim_type"}
    value_fields = {"tx_nout", "has_source", "is_signature_valid"}
    # Comparison operator -> hub opcode.
    opcodes = {'=': 0, '<=': 1, '>=': 2, '<': 3, '>': 4}
    # Iterate over a snapshot since kwargs is mutated while looping.
    for key, value in list(kwargs.items()):
        if value in (None, [], False):
            kwargs.pop(key)
            continue
        if key in REPLACEMENTS:
            kwargs[REPLACEMENTS[key]] = kwargs.pop(key)
            key = REPLACEMENTS[key]
        if key == "normalized_name":
            kwargs[key] = normalize_name(value)
        # Note: separate `if` (not elif) so a just-renamed key still falls
        # through to the chain below.
        if key == "limit_claims_per_channel":
            # Only keep a positive limit; 0/None means "no limit" and is dropped.
            value = kwargs.pop("limit_claims_per_channel") or 0
            if value > 0:
                kwargs["limit_claims_per_channel"] = value
        elif key == "invalid_channel_signature":
            kwargs["is_signature_valid"] = {"value": not kwargs.pop("invalid_channel_signature")}
        elif key == "has_no_source":
            kwargs["has_source"] = {"value": not kwargs.pop("has_no_source")}
        elif key in value_fields:
            kwargs[key] = {"value": value} if not isinstance(value, dict) else value
        elif key in repeated_fields and isinstance(value, str):
            # Hub expects repeated fields as lists even for a single value.
            kwargs[key] = [value]
        elif key in ("claim_id", "channel_id"):
            kwargs[key] = {"invert": False, "value": [kwargs[key]]}
        elif key in ("claim_ids", "channel_ids"):
            # Plural form collapses onto the singular hub field name.
            kwargs[key[:-1]] = {"invert": False, "value": kwargs.pop(key)}
        elif key == "not_channel_ids":
            kwargs["channel_id"] = {"invert": True, "value": kwargs.pop("not_channel_ids")}
        elif key in MY_RANGE_FIELDS:
            constraints = []
            for val in value if isinstance(value, list) else [value]:
                operator = '='
                if isinstance(val, str) and val[0] in opcodes:
                    # Two-character operators ('<=', '>=') take precedence.
                    operator_length = 2 if val[:2] in opcodes else 1
                    operator, val = val[:operator_length], val[operator_length:]
                # fee_amount is expressed in thousandths on the hub side.
                val = [int(val if key != 'fee_amount' else Decimal(val)*1000)]
                constraints.append({"op": opcodes[operator], "value": val})
            kwargs[key] = constraints
        elif key == 'order_by':  # TODO: remove this after removing support for old trending args from the api
            value = value if isinstance(value, list) else [value]
            new_value = []
            for new_v in value:
                migrated = new_v if new_v not in (
                    'trending_mixed', 'trending_local', 'trending_global', 'trending_group'
                ) else 'trending_score'
                # De-duplicate after migration (several old args map to one).
                if migrated not in new_value:
                    new_value.append(migrated)
            kwargs[key] = new_value
    return kwargs
2021-08-13 23:36:08 +02:00
DHT_HAS_CONTACTS = "dht_has_contacts"


class DHTHasContacts(RequiredCondition):
    """Required-condition: the DHT component must know at least one peer."""
    name = DHT_HAS_CONTACTS
    component = DHT_COMPONENT
    message = "your node is not connected to the dht"

    @staticmethod
    def evaluate(component):
        # True when the DHT routing table holds at least one contact.
        return len(component.contacts) > 0
2018-12-13 04:32:44 +01:00
class JSONRPCError:
    """A JSON-RPC 2.0 error object.

    See http://www.jsonrpc.org/specification#error_object
    """
    CODE_PARSE_ERROR = -32700  # Invalid JSON. Error while parsing the JSON text.
    CODE_INVALID_REQUEST = -32600  # The JSON sent is not a valid Request object.
    CODE_METHOD_NOT_FOUND = -32601  # The method does not exist / is not available.
    CODE_INVALID_PARAMS = -32602  # Invalid method parameter(s).
    CODE_INTERNAL_ERROR = -32603  # Internal JSON-RPC error (I think this is like a 500?)
    CODE_APPLICATION_ERROR = -32500  # Generic error with our app??
    CODE_AUTHENTICATION_ERROR = -32501  # Authentication failed

    # Default human-readable messages per error code.
    MESSAGES = {
        CODE_PARSE_ERROR: "Parse Error. Data is not valid JSON.",
        CODE_INVALID_REQUEST: "JSON data is not a valid Request",
        CODE_METHOD_NOT_FOUND: "Method Not Found",
        CODE_INVALID_PARAMS: "Invalid Params",
        CODE_INTERNAL_ERROR: "Internal Error",
        CODE_AUTHENTICATION_ERROR: "Authentication Failed",
    }

    # HTTP status code used when transporting each JSON-RPC error code.
    HTTP_CODES = {
        CODE_INVALID_REQUEST: 400,
        CODE_PARSE_ERROR: 400,
        CODE_INVALID_PARAMS: 400,
        CODE_METHOD_NOT_FOUND: 404,
        CODE_INTERNAL_ERROR: 500,
        CODE_APPLICATION_ERROR: 500,
        CODE_AUTHENTICATION_ERROR: 401,
    }

    def __init__(self, code: int, message: str, data: dict = None):
        assert code and isinstance(code, int), "'code' must be an int"
        assert message and isinstance(message, str), "'message' must be a string"
        assert data is None or isinstance(data, dict), "'data' must be None or a dict"
        self.code = code
        self.message = message
        self.data = data or {}

    def to_dict(self):
        """Serialize into the JSON-RPC error-object shape."""
        return {
            'code': self.code,
            'message': self.message,
            'data': self.data,
        }

    @staticmethod
    def filter_traceback(traceback):
        """Split a traceback string into lines; when the Twisted-style
        '--- <exception caught here> ---' marker is present, keep only the
        non-empty lines after it.  Returns [] for a None traceback."""
        if traceback is None:
            return []
        trace_lines = traceback.split("\n")
        for position, line in enumerate(trace_lines):
            if "--- <exception caught here> ---" not in line:
                continue
            if len(trace_lines) > position + 1:
                return [tail for tail in trace_lines[position + 1:] if tail]
            break
        return trace_lines

    @classmethod
    def create_command_exception(cls, command, args, kwargs, exception, traceback):
        """Build an application-error instance describing a failed command,
        masking any string 'password' argument before it is echoed back."""
        if 'password' in kwargs and isinstance(kwargs['password'], str):
            kwargs['password'] = '*' * len(kwargs['password'])
        detail = {
            'name': exception.__class__.__name__,
            'traceback': cls.filter_traceback(traceback),
            'command': command,
            'args': args,
            'kwargs': kwargs,
        }
        return cls(cls.CODE_APPLICATION_ERROR, str(exception), detail)
2018-12-13 04:32:44 +01:00
class UnknownAPIMethodError(Exception):
    """Raised when a JSON-RPC request names a method this daemon does not expose."""
def jsonrpc_dumps_pretty(obj, **kwargs):
    """Serialize *obj* as a pretty-printed JSON-RPC 2.0 response string.

    JSONRPCError instances become the 'error' member; anything else becomes
    the 'result' member.  Extra kwargs are forwarded to json.dumps (the
    JSONResponseEncoder accepts e.g. ledger/include_protobuf).
    """
    if isinstance(obj, JSONRPCError):
        envelope = {"jsonrpc": "2.0", "error": obj.to_dict()}
    else:
        envelope = {"jsonrpc": "2.0", "result": obj}
    return json.dumps(envelope, cls=JSONResponseEncoder, sort_keys=True, indent=2, **kwargs) + "\n"
def trap(err, *to_trap):
    # Thin pass-through to `err.trap(...)` — presumably the Twisted Failure
    # API, which re-raises unless the wrapped exception matches one of
    # *to_trap*.  NOTE(review): looks like a Twisted-era leftover; no caller
    # is visible in this part of the file — confirm before relying on it.
    err.trap(*to_trap)
class JSONRPCServerType(type):
    """Metaclass that scans a class for 'jsonrpc_'-prefixed methods and
    registers them, keyed by API name, into ``callable_methods`` or (when the
    method carries a ``_deprecated`` attribute) ``deprecated_methods``."""
    def __new__(mcs, name, bases, newattrs):
        klass = type.__new__(mcs, name, bases, newattrs)
        klass.callable_methods = {}
        klass.deprecated_methods = {}
        for attr_name in dir(klass):
            if not attr_name.startswith("jsonrpc_"):
                continue
            method = getattr(klass, attr_name)
            api_name = attr_name.split("jsonrpc_")[1]
            if hasattr(method, '_deprecated'):
                klass.deprecated_methods[api_name] = method
            else:
                klass.callable_methods[api_name] = method
        return klass
2020-05-03 04:30:25 +02:00
# Latency buckets (in seconds) for the API response-time histogram metric.
HISTOGRAM_BUCKETS = (
    .005, .01, .025, .05, .075, .1, .25, .5, .75, 1.0, 2.5, 5.0, 7.5, 10.0, 15.0, 20.0, 30.0, 60.0, float('inf')
)
2018-12-13 04:32:44 +01:00
class Daemon(metaclass=JSONRPCServerType):
    """
    LBRYnet daemon, a jsonrpc interface to lbry functions
    """
    # Populated by the JSONRPCServerType metaclass from jsonrpc_*-prefixed
    # methods; maps API name -> method.
    callable_methods: dict
    deprecated_methods: dict

    # Prometheus metrics, one time series per API method (labelled 'method').
    pending_requests_metric = Gauge(
        "pending_requests", "Number of running api requests", namespace="daemon_api",
        labelnames=("method",)
    )
    requests_count_metric = Counter(
        "requests_count", "Number of requests received", namespace="daemon_api",
        labelnames=("method",)
    )
    failed_request_metric = Counter(
        "failed_request_count", "Number of failed requests", namespace="daemon_api",
        labelnames=("method",)
    )
    cancelled_request_metric = Counter(
        "cancelled_request_count", "Number of cancelled requests", namespace="daemon_api",
        labelnames=("method",)
    )
    response_time_metric = Histogram(
        "response_time", "Response times", namespace="daemon_api", buckets=HISTOGRAM_BUCKETS,
        labelnames=("method",)
    )
2019-01-24 00:04:16 +01:00
    def __init__(self, conf: Config, component_manager: typing.Optional[ComponentManager] = None):
        """Wire up configuration, analytics, the component manager and the
        three aiohttp applications (JSON-RPC, streaming, prometheus metrics).
        The servers are only started later, in start()."""
        self.conf = conf
        self.platform_info = system_info.get_platform()
        self._video_file_analyzer = VideoFileAnalyzer(conf)
        self._node_id = None
        self._installation_id = None
        # Fresh random id per daemon process (unlike installation_id, which
        # is persisted to disk).
        self.session_id = base58.b58encode(utils.generate_id()).decode()
        self.analytics_manager = analytics.AnalyticsManager(conf, self.installation_id, self.session_id)
        self.component_manager = component_manager or ComponentManager(
            conf, analytics_manager=self.analytics_manager,
            skip_components=conf.components_to_skip or []
        )
        self.component_startup_task = None

        logging.getLogger('aiohttp.access').setLevel(logging.WARN)

        # JSON-RPC app: legacy /lbryapi routes plus POST / and an OPTIONS
        # handler for CORS preflight.
        rpc_app = web.Application()
        rpc_app.router.add_get('/lbryapi', self.handle_old_jsonrpc)
        rpc_app.router.add_post('/lbryapi', self.handle_old_jsonrpc)
        rpc_app.router.add_post('/', self.handle_old_jsonrpc)
        rpc_app.router.add_options('/', self.add_cors_headers)
        self.rpc_runner = web.AppRunner(rpc_app)

        # Streaming app: resolve-and-redirect /get routes and ranged /stream.
        streaming_app = web.Application()
        streaming_app.router.add_get('/get/{claim_name}', self.handle_stream_get_request)
        streaming_app.router.add_get('/get/{claim_name}/{claim_id}', self.handle_stream_get_request)
        streaming_app.router.add_get('/stream/{sd_hash}', self.handle_stream_range_request)
        self.streaming_runner = web.AppRunner(streaming_app)

        # Prometheus metrics app (started only when prometheus_port is set).
        prom_app = web.Application()
        prom_app.router.add_get('/metrics', self.handle_metrics_get_request)
        self.metrics_runner = web.AppRunner(prom_app)
2019-01-22 23:44:17 +01:00
    @property
    def dht_node(self) -> typing.Optional['Node']:
        """The DHT node component, fetched from the component manager."""
        return self.component_manager.get_component(DHT_COMPONENT)
    @property
    def wallet_manager(self) -> typing.Optional['WalletManager']:
        """The wallet manager component, fetched from the component manager."""
        return self.component_manager.get_component(WALLET_COMPONENT)
    @property
    def storage(self) -> typing.Optional['SQLiteStorage']:
        """The SQLite storage component, fetched from the component manager."""
        return self.component_manager.get_component(DATABASE_COMPONENT)
    @property
    def file_manager(self) -> typing.Optional['FileManager']:
        """The file manager component, fetched from the component manager."""
        return self.component_manager.get_component(FILE_MANAGER_COMPONENT)
2019-01-22 23:44:17 +01:00
    @property
    def exchange_rate_manager(self) -> typing.Optional['ExchangeRateManager']:
        """The exchange-rate manager component, fetched from the component manager."""
        return self.component_manager.get_component(EXCHANGE_RATE_MANAGER_COMPONENT)
    @property
    def blob_manager(self) -> typing.Optional['BlobManager']:
        """The blob manager component, fetched from the component manager."""
        return self.component_manager.get_component(BLOB_COMPONENT)
2021-08-16 20:15:12 +02:00
    @property
    def disk_space_manager(self) -> typing.Optional['DiskSpaceManager']:
        """The disk-space manager component, fetched from the component manager."""
        return self.component_manager.get_component(DISK_SPACE_COMPONENT)
2019-01-22 23:44:17 +01:00
    @property
    def upnp(self) -> typing.Optional['UPnPComponent']:
        """The UPnP component, fetched from the component manager."""
        return self.component_manager.get_component(UPNP_COMPONENT)
2018-12-13 04:32:44 +01:00
2019-01-21 21:55:50 +01:00
    @classmethod
    def get_api_definitions(cls):
        """Build a structured description of the public API by introspection.

        Returns a dict with:
          * 'groups': {group_name: doc} collected from *_DOC class attributes;
          * 'commands': per-command metadata (api_method_name, name, group,
            doc, method) plus, for deprecated commands, 'replaced_by' pointing
            at the replacement command's entry.
        """
        prefix = 'jsonrpc_'
        # Commands containing '_' that intentionally belong to no group.
        not_grouped = ['routing_table_get', 'ffmpeg_find']
        api = {
            'groups': {
                group_name[:-len('_DOC')].lower(): getattr(cls, group_name).strip()
                for group_name in dir(cls) if group_name.endswith('_DOC')
            },
            'commands': {}
        }
        for jsonrpc_method in dir(cls):
            if jsonrpc_method.startswith(prefix):
                full_name = jsonrpc_method[len(prefix):]
                method = getattr(cls, jsonrpc_method)
                if full_name in not_grouped:
                    name_parts = [full_name]
                else:
                    # '<group>_<name>' — split on the first underscore only.
                    name_parts = full_name.split('_', 1)
                if len(name_parts) == 1:
                    # Ungrouped command, e.g. 'status'.
                    group = None
                    name, = name_parts
                elif len(name_parts) == 2:
                    group, name = name_parts
                    assert group in api['groups'], \
                        f"Group {group} does not have doc string for command {full_name}."
                else:
                    raise NameError(f'Could not parse method name: {jsonrpc_method}')
                api['commands'][full_name] = {
                    'api_method_name': full_name,
                    'name': name,
                    'group': group,
                    'doc': method.__doc__,
                    'method': method,
                }
                if hasattr(method, '_deprecated'):
                    api['commands'][full_name]['replaced_by'] = method.new_command
        # Second pass: resolve 'replaced_by' names into the actual entries.
        for command in api['commands'].values():
            if 'replaced_by' in command:
                command['replaced_by'] = api['commands'][command['replaced_by']]
        return api
2019-01-21 21:55:50 +01:00
    @property
    def db_revision_file_path(self):
        """Path of the file that records the database schema revision."""
        return os.path.join(self.conf.data_dir, 'db_revision')
@property
def installation_id(self):
install_id_filename = os.path.join(self.conf.data_dir, "install_id")
if not self._installation_id:
if os.path.isfile(install_id_filename):
with open(install_id_filename, "r") as install_id_file:
self._installation_id = str(install_id_file.read()).strip()
if not self._installation_id:
self._installation_id = base58.b58encode(utils.generate_id()).decode()
with open(install_id_filename, "w") as install_id_file:
install_id_file.write(self._installation_id)
return self._installation_id
def ensure_data_dir(self):
if not os.path.isdir(self.conf.data_dir):
os.makedirs(self.conf.data_dir)
if not os.path.isdir(os.path.join(self.conf.data_dir, "blobfiles")):
os.makedirs(os.path.join(self.conf.data_dir, "blobfiles"))
return self.conf.data_dir
def ensure_wallet_dir(self):
if not os.path.isdir(self.conf.wallet_dir):
os.makedirs(self.conf.wallet_dir)
def ensure_download_dir(self):
if not os.path.isdir(self.conf.download_dir):
os.makedirs(self.conf.download_dir)
2019-01-23 22:41:14 +01:00
    async def start(self):
        """Bring up the RPC, streaming and (optionally) metrics web servers,
        then start all components via initialize().

        Exits the process (SystemExit) if any server fails to bind or the
        components fail to start; reports outcomes to analytics either way.
        """
        log.info("Starting LBRYNet Daemon")
        log.debug("Settings: %s", json.dumps(self.conf.settings_dict, indent=2))
        log.info("Platform: %s", json.dumps(self.platform_info, indent=2))
        await self.analytics_manager.send_server_startup()
        await self.rpc_runner.setup()
        await self.streaming_runner.setup()
        await self.metrics_runner.setup()

        try:
            rpc_site = web.TCPSite(self.rpc_runner, self.conf.api_host, self.conf.api_port, shutdown_timeout=.5)
            await rpc_site.start()
            log.info('RPC server listening on TCP %s:%i', *rpc_site._server.sockets[0].getsockname()[:2])
        except OSError as e:
            log.error('RPC server failed to bind TCP %s:%i', self.conf.api_host, self.conf.api_port)
            await self.analytics_manager.send_server_startup_error(str(e))
            raise SystemExit()

        try:
            streaming_site = web.TCPSite(self.streaming_runner, self.conf.streaming_host, self.conf.streaming_port,
                                         shutdown_timeout=.5)
            await streaming_site.start()
            log.info('media server listening on TCP %s:%i', *streaming_site._server.sockets[0].getsockname()[:2])
        except OSError as e:
            log.error('media server failed to bind TCP %s:%i', self.conf.streaming_host, self.conf.streaming_port)
            await self.analytics_manager.send_server_startup_error(str(e))
            raise SystemExit()

        # The prometheus metrics server is optional: only started when a
        # prometheus_port is configured.
        if self.conf.prometheus_port:
            try:
                prom_site = web.TCPSite(self.metrics_runner, "0.0.0.0", self.conf.prometheus_port, shutdown_timeout=.5)
                await prom_site.start()
                log.info('metrics server listening on TCP %s:%i', *prom_site._server.sockets[0].getsockname()[:2])
            except OSError as e:
                log.error('metrics server failed to bind TCP :%i', self.conf.prometheus_port)
                await self.analytics_manager.send_server_startup_error(str(e))
                raise SystemExit()

        try:
            await self.initialize()
        except asyncio.CancelledError:
            # Shutdown requested mid-startup: report and let cancellation
            # propagate to the caller.
            log.info("shutting down before finished starting")
            await self.analytics_manager.send_server_startup_error("shutting down before finished starting")
            raise
        except Exception as e:
            await self.analytics_manager.send_server_startup_error(str(e))
            log.exception('Failed to start lbrynet')
            raise SystemExit()

        await self.analytics_manager.send_server_startup_success()
2018-12-13 04:32:44 +01:00
    async def initialize(self):
        """Prepare directories, start analytics, then start all components."""
        self.ensure_data_dir()
        self.ensure_wallet_dir()
        self.ensure_download_dir()
        if not self.analytics_manager.is_started:
            await self.analytics_manager.start()
        # Run component startup as a task so stop() can cancel it mid-flight.
        self.component_startup_task = asyncio.create_task(self.component_manager.start())
        await self.component_startup_task
    async def stop(self):
        """Shut down components, then the web servers, then analytics."""
        if self.component_startup_task is not None:
            if self.component_startup_task.done():
                await self.component_manager.stop()
            else:
                # Startup still in flight: cancel it, stop the wallet if it
                # managed to start, then stop everything else.
                self.component_startup_task.cancel()
                # the wallet component might have not started
                try:
                    wallet_component = self.component_manager.get_actual_component('wallet')
                except NameError:
                    pass
                else:
                    await wallet_component.stop()
                await self.component_manager.stop()
        log.info("stopped api components")
        await self.rpc_runner.cleanup()
        await self.streaming_runner.cleanup()
        await self.metrics_runner.cleanup()
        log.info("stopped api server")
        if self.analytics_manager.is_started:
            self.analytics_manager.stop()
        log.info("finished shutting down")
2018-12-13 04:32:44 +01:00
2021-03-28 05:56:19 +02:00
async def add_cors_headers(self, request):
if self.conf.allowed_origin:
return web.Response(
2021-03-28 05:56:19 +02:00
headers={
'Access-Control-Allow-Origin': self.conf.allowed_origin,
'Access-Control-Allow-Methods': self.conf.allowed_origin,
'Access-Control-Allow-Headers': self.conf.allowed_origin,
}
)
return None
2018-12-13 04:32:44 +01:00
    async def handle_old_jsonrpc(self, request):
        """aiohttp handler for JSON-RPC requests on '/' and '/lbryapi'."""
        ensure_request_allowed(request, self.conf)
        data = await request.json()
        params = data.get('params', {})
        # 'include_protobuf' is an out-of-band serialization flag, not a
        # method parameter, so it is popped before dispatch.
        include_protobuf = params.pop('include_protobuf', False) if isinstance(params, dict) else False
        result = await self._process_rpc_call(data)
        ledger = None
        if 'wallet' in self.component_manager.get_components_status():
            # self.ledger only available if wallet component is not skipped
            ledger = self.ledger
        try:
            encoded_result = jsonrpc_dumps_pretty(
                result, ledger=ledger, include_protobuf=include_protobuf)
        except Exception:
            # The command succeeded but its result could not be serialized;
            # report that as an application error rather than a 500.
            log.exception('Failed to encode JSON RPC result:')
            encoded_result = jsonrpc_dumps_pretty(JSONRPCError(
                JSONRPCError.CODE_APPLICATION_ERROR,
                'After successfully executing the command, failed to encode result for JSON RPC response.',
                {'traceback': format_exc()}
            ), ledger=ledger)
        headers = {}
        if self.conf.allowed_origin:
            # Same CORS headers that add_cors_headers() sends for preflight.
            headers.update({
                'Access-Control-Allow-Origin': self.conf.allowed_origin,
                'Access-Control-Allow-Methods': self.conf.allowed_origin,
                'Access-Control-Allow-Headers': self.conf.allowed_origin,
            })
        return web.Response(
            text=encoded_result,
            headers=headers,
            content_type='application/json'
        )
    async def handle_metrics_get_request(self, request: web.Request):
        """Serve prometheus metrics in the text exposition format (v0.0.4)."""
        try:
            return web.Response(
                text=prom_generate_latest().decode(),
                content_type='text/plain; version=0.0.4'
            )
        except Exception:
            log.exception('could not generate prometheus data')
            raise
    async def handle_stream_get_request(self, request: web.Request):
        """Resolve /get/{claim_name}[/{claim_id}] into a stream and redirect
        the client to its /stream/{sd_hash} URL."""
        if not self.conf.streaming_get:
            log.warning("streaming_get is disabled, rejecting request")
            raise web.HTTPForbidden()
        name_and_claim_id = request.path.split("/get/")[1]
        if "/" not in name_and_claim_id:
            uri = f"lbry://{name_and_claim_id}"
        else:
            name, claim_id = name_and_claim_id.split("/")
            uri = f"lbry://{name}#{claim_id}"
        # Wait for the file manager to finish starting before downloading.
        if not self.file_manager.started.is_set():
            await self.file_manager.started.wait()
        stream = await self.jsonrpc_get(uri)
        if isinstance(stream, dict):
            # jsonrpc_get returned an error payload instead of a stream object.
            raise web.HTTPServerError(text=stream['error'])
        raise web.HTTPFound(f"/stream/{stream.sd_hash}")
    async def handle_stream_range_request(self, request: web.Request):
        """Top-level handler for /stream/{sd_hash}: delegates to
        _handle_stream_range_request() and normalizes logging/error handling."""
        try:
            return await self._handle_stream_range_request(request)
        except web.HTTPException as err:
            log.warning("http code during /stream range request: %s", err)
            raise err
        except asyncio.CancelledError:
            # if not excepted here, it would bubble up the error to the console. every time you closed
            # a running tab, you'd get this error in the console
            log.debug("/stream range request cancelled")
        except Exception:
            log.exception("error handling /stream range request")
            raise
        finally:
            log.debug("finished handling /stream range request")
async def _handle_stream_range_request(self, request: web.Request):
sd_hash = request.path.split("/stream/")[1]
if not self.file_manager.started.is_set():
await self.file_manager.started.wait()
if sd_hash not in self.file_manager.streams:
return web.HTTPNotFound()
return await self.file_manager.stream_partial_content(request, sd_hash)
    async def _process_rpc_call(self, data):
        """Dispatch a decoded JSON-RPC request dict to its jsonrpc_* method.

        Returns either the method's result or a JSONRPCError; application
        errors never raise out of here — only asyncio.CancelledError
        propagates.  Per-method prometheus metrics are updated around the call.
        """
        args = data.get('params', {})

        try:
            function_name = data['method']
        except KeyError:
            return JSONRPCError(
                JSONRPCError.CODE_METHOD_NOT_FOUND,
                "Missing 'method' value in request."
            )

        try:
            method = self._get_jsonrpc_method(function_name)
        except UnknownAPIMethodError:
            return JSONRPCError(
                JSONRPCError.CODE_METHOD_NOT_FOUND,
                str(CommandDoesNotExistError(function_name))
            )

        # Normalize the several accepted 'params' shapes into (args, kwargs).
        if args in ([{}], []):
            _args, _kwargs = (), {}
        elif isinstance(args, dict):
            _args, _kwargs = (), args
        elif isinstance(args, list) and len(args) == 1 and isinstance(args[0], dict):
            # TODO: this is for backwards compatibility. Remove this once API and UI are updated
            # TODO: also delete EMPTY_PARAMS then
            _args, _kwargs = (), args[0]
        elif isinstance(args, list) and len(args) == 2 and \
                isinstance(args[0], list) and isinstance(args[1], dict):
            _args, _kwargs = args
        else:
            return JSONRPCError(
                JSONRPCError.CODE_INVALID_PARAMS,
                f"Invalid parameters format: {args}"
            )

        # Log full arguments for calls that move funds or modify claims.
        if is_transactional_function(function_name):
            log.info("%s %s %s", function_name, _args, _kwargs)

        params_error, erroneous_params = self._check_params(method, _args, _kwargs)
        if params_error is not None:
            params_error_message = '{} for {} command: {}'.format(
                params_error, function_name, ', '.join(erroneous_params)
            )
            log.warning(params_error_message)
            return JSONRPCError(
                JSONRPCError.CODE_INVALID_PARAMS,
                params_error_message,
            )

        self.pending_requests_metric.labels(method=function_name).inc()
        self.requests_count_metric.labels(method=function_name).inc()
        start = time.perf_counter()
        try:
            result = method(self, *_args, **_kwargs)
            # Methods may be sync or async; await only when needed.
            if asyncio.iscoroutine(result):
                result = await result
            return result
        except asyncio.CancelledError:
            self.cancelled_request_metric.labels(method=function_name).inc()
            log.info("cancelled API call for: %s", function_name)
            raise
        except Exception as e:  # pylint: disable=broad-except
            self.failed_request_metric.labels(method=function_name).inc()
            # Known application errors get a one-line log; anything else gets
            # a full traceback.
            if not isinstance(e, BaseError):
                log.exception("error handling api request")
            else:
                log.error("error handling api request: %s", e)
            return JSONRPCError.create_command_exception(
                command=function_name, args=_args, kwargs=_kwargs, exception=e, traceback=format_exc()
            )
        finally:
            self.pending_requests_metric.labels(method=function_name).dec()
            self.response_time_metric.labels(method=function_name).observe(time.perf_counter() - start)
2018-12-13 04:32:44 +01:00
def _verify_method_is_callable(self, function_path):
if function_path not in self.callable_methods:
raise UnknownAPIMethodError(function_path)
def _get_jsonrpc_method(self, function_path):
if function_path in self.deprecated_methods:
new_command = self.deprecated_methods[function_path].new_command
log.warning('API function \"%s\" is deprecated, please update to use \"%s\"',
function_path, new_command)
function_path = new_command
self._verify_method_is_callable(function_path)
return self.callable_methods.get(function_path)
@staticmethod
def _check_params(function, args_tup, args_dict):
argspec = inspect.getfullargspec(undecorated(function))
num_optional_params = 0 if argspec.defaults is None else len(argspec.defaults)
duplicate_params = [
duplicate_param
for duplicate_param in argspec.args[1:len(args_tup) + 1]
if duplicate_param in args_dict
]
if duplicate_params:
return 'Duplicate parameters', duplicate_params
missing_required_params = [
required_param
2019-10-04 15:52:29 +02:00
for required_param in argspec.args[len(args_tup) + 1:-num_optional_params]
2018-12-13 04:32:44 +01:00
if required_param not in args_dict
]
if len(missing_required_params) > 0:
2018-12-13 04:32:44 +01:00
return 'Missing required parameters', missing_required_params
extraneous_params = [] if argspec.varkw is not None else [
extra_param
for extra_param in args_dict
if extra_param not in argspec.args[1:]
]
if len(extraneous_params) > 0:
2018-12-13 04:32:44 +01:00
return 'Extraneous parameters', extraneous_params
return None, None
2018-07-05 04:16:02 +02:00
@property
2020-01-03 04:18:49 +01:00
def ledger(self) -> Optional['Ledger']:
2018-08-16 01:23:06 +02:00
try:
return self.wallet_manager.default_account.ledger
2018-08-16 01:23:06 +02:00
except AttributeError:
return None
2018-07-05 04:16:02 +02:00
2019-01-22 23:44:17 +01:00
async def get_est_cost_from_uri(self, uri: str) -> typing.Optional[float]:
2016-11-30 22:23:48 +01:00
"""
Resolve a name and return the estimated stream cost
"""
2017-06-09 19:47:13 +02:00
2019-10-29 13:58:55 +01:00
resolved = await self.resolve([], uri)
2017-06-09 19:47:13 +02:00
if resolved:
claim_response = resolved[uri]
else:
2017-04-11 04:47:54 +02:00
claim_response = None
if claim_response and 'claim' in claim_response:
if 'value' in claim_response['claim'] and claim_response['claim']['value'] is not None:
2019-03-20 06:46:23 +01:00
claim_value = Claim.from_bytes(claim_response['claim']['value'])
if not claim_value.stream.has_fee:
2019-01-22 23:44:17 +01:00
return 0.0
return round(
self.exchange_rate_manager.convert_currency(
2019-03-20 06:46:23 +01:00
claim_value.stream.fee.currency, "LBC", claim_value.stream.fee.amount
2019-01-22 23:44:17 +01:00
), 5
)
else:
log.warning("Failed to estimate cost for %s", uri)
2017-01-03 20:13:01 +01:00
############################################################################
# #
# JSON-RPC API methods start here #
# #
############################################################################
2020-01-08 18:39:51 +01:00
def jsonrpc_stop(self): # pylint: disable=no-self-use
"""
2019-01-25 00:22:53 +01:00
Stop lbrynet API server.
Usage:
stop
Options:
None
Returns:
(string) Shutdown message
"""
def shutdown():
raise web.GracefulExit()
log.info("Shutting down lbrynet daemon")
asyncio.get_event_loop().call_later(0, shutdown)
return "Shutting down"
async def jsonrpc_ffmpeg_find(self):
"""
Get ffmpeg installation information
Usage:
ffmpeg_find
Options:
None
Returns:
(dict) Dictionary of ffmpeg information
{
'available': (bool) found ffmpeg,
'which': (str) path to ffmpeg,
'analyze_audio_volume': (bool) should ffmpeg analyze audio
}
"""
return await self._video_file_analyzer.status(reset=True, recheck=True)
2018-12-15 21:31:02 +01:00
async def jsonrpc_status(self):
2016-03-24 03:27:48 +01:00
"""
2017-05-28 22:01:53 +02:00
Get daemon status
2017-05-28 22:01:53 +02:00
Usage:
2018-08-02 23:33:56 +02:00
status
2017-06-12 22:19:26 +02:00
2018-08-03 19:31:51 +02:00
Options:
None
2017-06-12 22:19:26 +02:00
Returns:
(dict) lbrynet-daemon status
{
'installation_id': (str) installation id - base58,
'is_running': (bool),
'skipped_components': (list) [names of skipped components (str)],
'startup_status': { Does not include components which have been skipped
2019-02-19 23:26:08 +01:00
'blob_manager': (bool),
'blockchain_headers': (bool),
'database': (bool),
'dht': (bool),
2019-02-19 23:26:08 +01:00
'exchange_rate_manager': (bool),
'hash_announcer': (bool),
'peer_protocol_server': (bool),
'file_manager': (bool),
'libtorrent_component': (bool),
'upnp': (bool),
2019-02-19 23:26:08 +01:00
'wallet': (bool),
2017-06-12 22:19:26 +02:00
},
'connection_status': {
'code': (str) connection status code,
'message': (str) connection status message
2017-06-12 22:19:26 +02:00
},
2018-08-02 23:33:56 +02:00
'blockchain_headers': {
'downloading_headers': (bool),
'download_progress': (float) 0-100.0
},
'wallet': {
'connected': (str) host and port of the connected spv server,
'blocks': (int) local blockchain height,
'blocks_behind': (int) remote_height - local_height,
'best_blockhash': (str) block hash of most recent block,
2018-08-13 04:04:48 +02:00
'is_encrypted': (bool),
'is_locked': (bool),
'connected_servers': (list) [
{
'host': (str) server hostname,
'port': (int) server port,
'latency': (int) milliseconds
}
],
2017-06-12 22:19:26 +02:00
},
'libtorrent_component': {
'running': (bool) libtorrent was detected and started successfully,
},
'dht': {
2018-07-20 23:22:10 +02:00
'node_id': (str) lbry dht node id - hex encoded,
'peers_in_routing_table': (int) the number of peers in the routing table,
},
2018-08-02 23:33:56 +02:00
'blob_manager': {
'finished_blobs': (int) number of finished blobs in the blob manager,
2019-06-03 05:50:17 +02:00
'connections': {
'incoming_bps': {
<source ip and tcp port>: (int) bytes per second received,
},
'outgoing_bps': {
<destination ip and tcp port>: (int) bytes per second sent,
},
'total_outgoing_mps': (float) megabytes per second sent,
'total_incoming_mps': (float) megabytes per second received,
'max_outgoing_mbs': (float) maximum bandwidth (megabytes per second) sent, since the
daemon was started
'max_incoming_mbs': (float) maximum bandwidth (megabytes per second) received, since the
daemon was started
'total_sent' : (int) total number of bytes sent since the daemon was started
'total_received' : (int) total number of bytes received since the daemon was started
2019-06-03 05:50:17 +02:00
}
2018-08-02 23:33:56 +02:00
},
'hash_announcer': {
'announce_queue_size': (int) number of blobs currently queued to be announced
},
'file_manager': {
2019-02-19 23:26:08 +01:00
'managed_files': (int) count of files in the stream manager,
},
'upnp': {
'aioupnp_version': (str),
'redirects': {
<TCP | UDP>: (int) external_port,
},
'gateway': (str) manufacturer and model,
'dht_redirect_set': (bool),
'peer_redirect_set': (bool),
'external_ip': (str) external ip address,
2018-08-02 23:33:56 +02:00
}
2017-06-12 22:19:26 +02:00
}
2017-01-03 20:13:01 +01:00
"""
ffmpeg_status = await self._video_file_analyzer.status()
running_components = self.component_manager.get_components_status()
2017-01-03 20:13:01 +01:00
response = {
2019-01-21 21:55:50 +01:00
'installation_id': self.installation_id,
'is_running': all(running_components.values()),
'skipped_components': self.component_manager.skip_components,
'startup_status': running_components,
'ffmpeg_status': ffmpeg_status
2017-01-03 20:13:01 +01:00
}
for component in self.component_manager.components:
status = await component.get_status()
if status:
response[component.component_name] = status
2018-12-15 21:31:02 +01:00
return response
2020-01-08 18:39:51 +01:00
def jsonrpc_version(self): # pylint: disable=no-self-use
"""
2019-01-25 00:22:53 +01:00
Get lbrynet API server version information
2017-05-28 22:01:53 +02:00
Usage:
version
Options:
None
Returns:
2017-03-14 00:14:11 +01:00
(dict) Dictionary of lbry version information
{
2017-03-15 21:31:58 +01:00
'processor': (str) processor type,
'python_version': (str) python version,
'platform': (str) platform string,
'os_release': (str) os release string,
'os_system': (str) os name,
'version': (str) lbrynet version,
'build': (str) "dev" | "qa" | "rc" | "release",
2017-03-14 00:14:11 +01:00
}
"""
2020-02-13 16:04:50 +01:00
return self.platform_info
2019-03-24 21:55:04 +01:00
@requires(WALLET_COMPONENT)
async def jsonrpc_resolve(self, urls: typing.Union[str, list], wallet_id=None, **kwargs):
2017-01-02 20:52:24 +01:00
"""
2019-03-24 21:55:04 +01:00
Get the claim that a URL refers to.
2017-01-02 20:52:24 +01:00
2017-05-28 22:01:53 +02:00
Usage:
resolve <urls>... [--wallet_id=<wallet_id>]
[--include_purchase_receipt]
[--include_is_my_output]
2020-03-22 06:13:26 +01:00
[--include_sent_supports]
[--include_sent_tips]
[--include_received_tips]
2020-08-04 17:33:39 +02:00
[--new_sdk_server=<new_sdk_server>]
2017-05-28 22:01:53 +02:00
Options:
--urls=<urls> : (str, list) one or more urls to resolve
2021-02-17 00:38:07 +01:00
--wallet_id=<wallet_id> : (str) wallet to check for claim purchase receipts
2020-08-04 20:12:40 +02:00
--new_sdk_server=<new_sdk_server> : (str) URL of the new SDK server (EXPERIMENTAL)
2020-03-22 05:24:38 +01:00
--include_purchase_receipt : (bool) lookup and include a receipt if this wallet
has purchased the claim being resolved
--include_is_my_output : (bool) lookup and include a boolean indicating
if claim being resolved is yours
2020-03-22 06:13:26 +01:00
--include_sent_supports : (bool) lookup and sum the total amount
of supports you've made to this claim
2020-03-22 06:13:26 +01:00
--include_sent_tips : (bool) lookup and sum the total amount
of tips you've made to this claim
2020-03-22 06:13:26 +01:00
(only makes sense when claim is not yours)
--include_received_tips : (bool) lookup and sum the total amount
of tips you've received to this claim
(only makes sense when claim is yours)
2017-01-02 20:52:24 +01:00
Returns:
2019-03-24 21:55:04 +01:00
Dictionary of results, keyed by url
'<url>': {
If a resolution error occurs:
'error': Error message
If the url resolves to a channel or a claim in a channel:
'certificate': {
'address': (str) claim address,
'amount': (float) claim amount,
'effective_amount': (float) claim amount including supports,
'claim_id': (str) claim id,
'claim_sequence': (int) claim sequence number (or -1 if unknown),
'decoded_claim': (bool) whether or not the claim value was decoded,
'height': (int) claim height,
2019-04-29 05:03:15 +02:00
'confirmations': (int) claim depth,
'timestamp': (int) timestamp of the block that included this claim tx,
2019-03-24 21:55:04 +01:00
'has_signature': (bool) included if decoded_claim
'name': (str) claim name,
'permanent_url': (str) permanent url of the certificate claim,
'supports: (list) list of supports [{'txid': (str) txid,
'nout': (int) nout,
'amount': (float) amount}],
'txid': (str) claim txid,
'nout': (str) claim nout,
'signature_is_valid': (bool), included if has_signature,
'value': ClaimDict if decoded, otherwise hex string
}
If the url resolves to a channel:
'claims_in_channel': (int) number of claims in the channel,
If the url resolves to a claim:
'claim': {
'address': (str) claim address,
'amount': (float) claim amount,
'effective_amount': (float) claim amount including supports,
'claim_id': (str) claim id,
'claim_sequence': (int) claim sequence number (or -1 if unknown),
'decoded_claim': (bool) whether or not the claim value was decoded,
'height': (int) claim height,
'depth': (int) claim depth,
'has_signature': (bool) included if decoded_claim
'name': (str) claim name,
'permanent_url': (str) permanent url of the claim,
'channel_name': (str) channel name if claim is in a channel
'supports: (list) list of supports [{'txid': (str) txid,
'nout': (int) nout,
'amount': (float) amount}]
'txid': (str) claim txid,
'nout': (str) claim nout,
'signature_is_valid': (bool), included if has_signature,
'value': ClaimDict if decoded, otherwise hex string
}
}
2017-01-02 20:52:24 +01:00
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
2019-01-22 23:44:17 +01:00
2019-03-24 21:55:04 +01:00
if isinstance(urls, str):
urls = [urls]
results = {}
valid_urls = set()
for url in urls:
2019-03-24 21:55:04 +01:00
try:
URL.parse(url)
valid_urls.add(url)
2019-04-29 06:38:58 +02:00
except ValueError:
results[url] = {"error": f"{url} is not a valid url"}
2019-03-24 21:55:04 +01:00
resolved = await self.resolve(wallet.accounts, list(valid_urls), **kwargs)
2019-03-24 21:55:04 +01:00
for resolved_uri in resolved:
2019-04-29 06:38:58 +02:00
results[resolved_uri] = resolved[resolved_uri] if resolved[resolved_uri] is not None else \
2019-10-04 15:52:29 +02:00
{"error": f"{resolved_uri} did not resolve to a claim"}
2019-03-24 21:55:04 +01:00
return results
@requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT,
FILE_MANAGER_COMPONENT)
async def jsonrpc_get(
self, uri, file_name=None, download_directory=None, timeout=None, save_file=None, wallet_id=None):
2019-03-24 21:55:04 +01:00
"""
Download stream from a LBRY name.
Usage:
get <uri> [<file_name> | --file_name=<file_name>]
[<download_directory> | --download_directory=<download_directory>] [<timeout> | --timeout=<timeout>]
[--save_file=<save_file>] [--wallet_id=<wallet_id>]
2019-03-24 21:55:04 +01:00
Options:
--uri=<uri> : (str) uri of the content to download
2019-04-05 06:22:17 +02:00
--file_name=<file_name> : (str) specified name for the downloaded file, overrides the stream file name
--download_directory=<download_directory> : (str) full path to the directory to download into
2019-03-24 21:55:04 +01:00
--timeout=<timeout> : (int) download timeout in number of seconds
--save_file=<save_file> : (bool) save the file to the downloads directory
2021-02-17 00:38:07 +01:00
--wallet_id=<wallet_id> : (str) wallet to check for claim purchase receipts
2019-03-24 21:55:04 +01:00
2019-04-06 21:55:08 +02:00
Returns: {File}
2019-03-24 21:55:04 +01:00
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
if download_directory and not os.path.isdir(download_directory):
return {"error": f"specified download directory \"{download_directory}\" does not exist"}
2019-03-24 21:55:04 +01:00
try:
stream = await self.file_manager.download_from_uri(
uri, self.exchange_rate_manager, timeout, file_name, download_directory,
save_file=save_file, wallet=wallet
2019-03-24 21:55:04 +01:00
)
if not stream:
raise DownloadSDTimeoutError(uri)
2019-03-24 21:55:04 +01:00
except Exception as e:
# TODO: use error from lbry.error
2019-03-24 21:55:04 +01:00
log.warning("Error downloading %s: %s", uri, str(e))
return {"error": str(e)}
2019-04-06 21:55:08 +02:00
return stream
2017-01-02 20:52:24 +01:00
2019-01-21 21:55:50 +01:00
SETTINGS_DOC = """
Settings management.
"""
2017-01-03 20:13:01 +01:00
def jsonrpc_settings_get(self):
"""
Get daemon settings
2017-05-28 22:01:53 +02:00
Usage:
settings_get
Options:
None
Returns:
2017-03-14 00:14:11 +01:00
(dict) Dictionary of daemon settings
See ADJUSTABLE_SETTINGS in lbry/conf.py for full list of settings
"""
2019-01-21 21:55:50 +01:00
return self.conf.settings_dict
def jsonrpc_settings_set(self, key, value):
"""
Set daemon settings
Usage:
2019-02-19 23:26:08 +01:00
settings_set (<key>) (<value>)
Options:
None
Returns:
(dict) Updated dictionary of daemon settings
"""
2019-01-21 21:55:50 +01:00
with self.conf.update_config() as c:
2019-12-30 21:45:21 +01:00
if value and isinstance(value, str) and value[0] in ('[', '{'):
value = json.loads(value)
attr: Setting = getattr(type(c), key)
cleaned = attr.deserialize(value)
setattr(c, key, cleaned)
return {key: cleaned}
def jsonrpc_settings_clear(self, key):
"""
Clear daemon settings
Usage:
settings_clear (<key>)
Options:
None
Returns:
(dict) Updated dictionary of daemon settings
"""
with self.conf.update_config() as c:
setattr(c, key, NOT_SET)
2019-11-23 03:41:27 +01:00
return {key: self.conf.settings_dict[key]}
2019-09-03 15:51:41 +02:00
PREFERENCE_DOC = """
Preferences management.
"""
def jsonrpc_preference_get(self, key=None, wallet_id=None):
2019-09-03 15:51:41 +02:00
"""
Get preference value for key or all values if not key is passed in.
Usage:
2019-09-20 15:25:50 +02:00
preference_get [<key>] [--wallet_id=<wallet_id>]
2019-09-03 15:51:41 +02:00
Options:
--key=<key> : (str) key associated with value
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
2019-09-03 15:51:41 +02:00
Returns:
(dict) Dictionary of preference(s)
"""
2019-09-20 15:25:50 +02:00
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
2019-09-03 15:51:41 +02:00
if key:
if key in wallet.preferences:
return {key: wallet.preferences[key]}
2019-09-03 15:51:41 +02:00
return
return wallet.preferences.to_dict_without_ts()
2019-09-03 15:51:41 +02:00
def jsonrpc_preference_set(self, key, value, wallet_id=None):
2019-09-03 15:51:41 +02:00
"""
Set preferences
Usage:
2019-09-20 15:25:50 +02:00
preference_set (<key>) (<value>) [--wallet_id=<wallet_id>]
2019-09-03 15:51:41 +02:00
Options:
--key=<key> : (str) key associated with value
--value=<key> : (str) key associated with value
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
2019-09-03 15:51:41 +02:00
Returns:
(dict) Dictionary with key/value of new preference
"""
2019-09-20 15:25:50 +02:00
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
2019-09-03 17:30:10 +02:00
if value and isinstance(value, str) and value[0] in ('[', '{'):
value = json.loads(value)
wallet.preferences[key] = value
wallet.save()
2019-09-03 15:51:41 +02:00
return {key: value}
WALLET_DOC = """
Create, modify and inspect wallets.
"""
@requires("wallet")
def jsonrpc_wallet_list(self, wallet_id=None, page=None, page_size=None):
"""
List wallets.
Usage:
wallet_list [--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>]
Options:
2019-09-20 22:47:49 +02:00
--wallet_id=<wallet_id> : (str) show specific wallet only
--page=<page> : (int) page to return during paginating
--page_size=<page_size> : (int) number of items on page during pagination
Returns: {Paginated[Wallet]}
"""
2019-09-20 22:47:49 +02:00
if wallet_id:
return paginate_list([self.wallet_manager.get_wallet_or_error(wallet_id)], 1, 1)
return paginate_list(self.wallet_manager.wallets, page, page_size)
2019-12-11 01:27:46 +01:00
def jsonrpc_wallet_reconnect(self):
2019-12-05 21:34:24 +01:00
"""
2019-12-11 01:27:46 +01:00
Reconnects ledger network client, applying new configurations.
2019-12-05 21:34:24 +01:00
Usage:
2019-12-11 01:27:46 +01:00
wallet_reconnect
2019-12-05 21:34:24 +01:00
Options:
Returns: None
"""
return self.wallet_manager.reset()
@requires("wallet")
async def jsonrpc_wallet_create(
self, wallet_id, skip_on_startup=False, create_account=False, single_key=False):
"""
Create a new wallet.
Usage:
wallet_create (<wallet_id> | --wallet_id=<wallet_id>) [--skip_on_startup]
2019-09-23 19:09:49 +02:00
[--create_account] [--single_key]
Options:
--wallet_id=<wallet_id> : (str) wallet file name
--skip_on_startup : (bool) don't add wallet to daemon_settings.yml
--create_account : (bool) generates the default account
--single_key : (bool) used with --create_account, creates single-key account
Returns: {Wallet}
"""
wallet_path = os.path.join(self.conf.wallet_dir, 'wallets', wallet_id)
for wallet in self.wallet_manager.wallets:
if wallet.id == wallet_id:
raise WalletAlreadyLoadedError(wallet_path)
if os.path.exists(wallet_path):
raise WalletAlreadyExistsError(wallet_path)
wallet = self.wallet_manager.import_wallet(wallet_path)
if not wallet.accounts and create_account:
2020-01-03 04:18:49 +01:00
account = Account.generate(
self.ledger, wallet, address_generator={
'name': SingleKey.name if single_key else HierarchicalDeterministic.name
}
)
if self.ledger.network.is_connected:
await self.ledger.subscribe_account(account)
wallet.save()
if not skip_on_startup:
with self.conf.update_config() as c:
c.wallets += [wallet_id]
return wallet
@requires("wallet")
async def jsonrpc_wallet_add(self, wallet_id):
"""
Add existing wallet.
Usage:
wallet_add (<wallet_id> | --wallet_id=<wallet_id>)
Options:
--wallet_id=<wallet_id> : (str) wallet file name
Returns: {Wallet}
"""
wallet_path = os.path.join(self.conf.wallet_dir, 'wallets', wallet_id)
for wallet in self.wallet_manager.wallets:
if wallet.id == wallet_id:
raise WalletAlreadyLoadedError(wallet_path)
if not os.path.exists(wallet_path):
raise WalletNotFoundError(wallet_path)
2019-10-30 19:06:20 +01:00
wallet = self.wallet_manager.import_wallet(wallet_path)
if self.ledger.network.is_connected:
for account in wallet.accounts:
await self.ledger.subscribe_account(account)
return wallet
@requires("wallet")
async def jsonrpc_wallet_remove(self, wallet_id):
"""
2019-09-20 22:00:00 +02:00
Remove an existing wallet.
Usage:
wallet_remove (<wallet_id> | --wallet_id=<wallet_id>)
Options:
--wallet_id=<wallet_id> : (str) name of wallet to remove
Returns: {Wallet}
"""
wallet = self.wallet_manager.get_wallet_or_error(wallet_id)
2019-09-30 18:10:59 +02:00
self.wallet_manager.wallets.remove(wallet)
for account in wallet.accounts:
await self.ledger.unsubscribe_account(account)
2019-09-30 18:10:59 +02:00
return wallet
2019-10-14 01:32:10 +02:00
@requires("wallet")
async def jsonrpc_wallet_balance(self, wallet_id=None, confirmations=0):
2019-10-14 01:32:10 +02:00
"""
Return the balance of a wallet
Usage:
wallet_balance [--wallet_id=<wallet_id>] [--confirmations=<confirmations>]
2019-10-14 01:32:10 +02:00
Options:
--wallet_id=<wallet_id> : (str) balance for specific wallet
--confirmations=<confirmations> : (int) Only include transactions with this many
confirmed blocks.
Returns:
(decimal) amount of lbry credits in wallet
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
balance = await self.ledger.get_detailed_balance(
accounts=wallet.accounts, confirmations=confirmations
2019-10-14 01:32:10 +02:00
)
return dict_values_to_lbc(balance)
2019-10-14 05:43:06 +02:00
def jsonrpc_wallet_status(self, wallet_id=None):
"""
Status of wallet including encryption/lock state.
Usage:
wallet_status [<wallet_id> | --wallet_id=<wallet_id>]
Options:
--wallet_id=<wallet_id> : (str) status of specific wallet
Returns:
Dictionary of wallet status information.
"""
if self.wallet_manager is None:
return {'is_encrypted': None, 'is_syncing': None, 'is_locked': None}
2019-10-14 05:43:06 +02:00
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
return {
'is_encrypted': wallet.is_encrypted,
'is_syncing': len(self.ledger._update_tasks) > 0,
'is_locked': wallet.is_locked
}
2019-10-14 05:43:06 +02:00
2019-10-09 16:34:55 +02:00
@requires(WALLET_COMPONENT)
def jsonrpc_wallet_unlock(self, password, wallet_id=None):
"""
Unlock an encrypted wallet
Usage:
wallet_unlock (<password> | --password=<password>) [--wallet_id=<wallet_id>]
Options:
--password=<password> : (str) password to use for unlocking
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
Returns:
(bool) true if wallet is unlocked, otherwise false
"""
return self.wallet_manager.get_wallet_or_default(wallet_id).unlock(password)
2019-10-09 16:34:55 +02:00
@requires(WALLET_COMPONENT)
def jsonrpc_wallet_lock(self, wallet_id=None):
"""
Lock an unlocked wallet
Usage:
wallet_lock [--wallet_id=<wallet_id>]
Options:
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
Returns:
(bool) true if wallet is locked, otherwise false
"""
return self.wallet_manager.get_wallet_or_default(wallet_id).lock()
2019-10-09 16:34:55 +02:00
@requires(WALLET_COMPONENT)
2019-10-09 16:34:55 +02:00
def jsonrpc_wallet_decrypt(self, wallet_id=None):
"""
Decrypt an encrypted wallet, this will remove the wallet password. The wallet must be unlocked to decrypt it
Usage:
wallet_decrypt [--wallet_id=<wallet_id>]
Options:
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
Returns:
(bool) true if wallet is decrypted, otherwise false
"""
return self.wallet_manager.get_wallet_or_default(wallet_id).decrypt()
2019-10-09 16:34:55 +02:00
@requires(WALLET_COMPONENT)
2019-10-09 16:34:55 +02:00
def jsonrpc_wallet_encrypt(self, new_password, wallet_id=None):
"""
Encrypt an unencrypted wallet with a password
Usage:
wallet_encrypt (<new_password> | --new_password=<new_password>)
[--wallet_id=<wallet_id>]
Options:
--new_password=<new_password> : (str) password to encrypt account
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
Returns:
(bool) true if wallet is decrypted, otherwise false
"""
return self.wallet_manager.get_wallet_or_default(wallet_id).encrypt(new_password)
2019-10-09 16:34:55 +02:00
@requires(WALLET_COMPONENT)
2019-10-14 15:49:08 +02:00
async def jsonrpc_wallet_send(
self, amount, addresses, wallet_id=None,
2020-06-17 06:50:53 +02:00
change_account_id=None, funding_account_ids=None, preview=False, blocking=True):
2019-10-14 15:49:08 +02:00
"""
Send the same number of credits to multiple addresses using all accounts in wallet to
fund the transaction and the default account to receive any change.
Usage:
wallet_send <amount> <addresses>... [--wallet_id=<wallet_id>] [--preview]
[--change_account_id=None] [--funding_account_ids=<funding_account_ids>...]
2020-06-17 06:50:53 +02:00
[--blocking]
2019-10-14 15:49:08 +02:00
Options:
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
--change_account_id=<wallet_id> : (str) account where change will go
--funding_account_ids=<funding_account_ids> : (str) accounts to fund the transaction
2020-06-17 06:50:53 +02:00
--preview : (bool) do not broadcast the transaction
--blocking : (bool) wait until tx has synced
2019-10-14 15:49:08 +02:00
Returns: {Transaction}
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
2019-10-14 15:49:08 +02:00
account = wallet.get_account_or_default(change_account_id)
accounts = wallet.get_accounts_or_all(funding_account_ids)
amount = self.get_dewies_or_error("amount", amount)
if addresses and not isinstance(addresses, list):
addresses = [addresses]
outputs = []
for address in addresses:
self.valid_address_or_error(address, allow_script_address=True)
if self.ledger.is_pubkey_address(address):
outputs.append(
Output.pay_pubkey_hash(
amount, self.ledger.address_to_hash160(address)
)
2019-10-14 15:49:08 +02:00
)
elif self.ledger.is_script_address(address):
outputs.append(
Output.pay_script_hash(
amount, self.ledger.address_to_hash160(address)
)
)
else:
raise ValueError(f"Unsupported address: '{address}'") # TODO: use error from lbry.error
2019-10-14 15:49:08 +02:00
tx = await Transaction.create(
[], outputs, accounts, account
)
if not preview:
2020-06-17 06:50:53 +02:00
await self.broadcast_or_release(tx, blocking)
2020-02-04 02:15:10 +01:00
self.component_manager.loop.create_task(self.analytics_manager.send_credits_sent())
2019-10-14 15:49:08 +02:00
else:
await self.ledger.release_tx(tx)
return tx
2019-01-21 21:55:50 +01:00
ACCOUNT_DOC = """
2019-03-26 03:06:36 +01:00
Create, modify and inspect wallet accounts.
2019-01-21 21:55:50 +01:00
"""
@requires("wallet")
async def jsonrpc_account_list(
self, account_id=None, wallet_id=None, confirmations=0,
include_claims=False, show_seed=False, page=None, page_size=None):
"""
2018-08-30 06:04:25 +02:00
List details of all of the accounts or a specific account.
Usage:
account_list [<account_id>] [--wallet_id=<wallet_id>]
[--confirmations=<confirmations>]
[--include_claims] [--show_seed]
[--page=<page>] [--page_size=<page_size>]
Options:
--account_id=<account_id> : (str) If provided only the balance for this
account will be given
--wallet_id=<wallet_id> : (str) accounts in specific wallet
--confirmations=<confirmations> : (int) required confirmations (default: 0)
--include_claims : (bool) include claims, requires than a
LBC account is specified (default: false)
2018-08-30 06:04:25 +02:00
--show_seed : (bool) show the seed for the account
--page=<page> : (int) page to return during paginating
--page_size=<page_size> : (int) number of items on page during pagination
Returns: {Paginated[Account]}
"""
kwargs = {
'confirmations': confirmations,
'show_seed': show_seed
}
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
2018-08-30 06:04:25 +02:00
if account_id:
return paginate_list([await wallet.get_account_or_error(account_id).get_details(**kwargs)], 1, 1)
else:
return paginate_list(await wallet.get_detailed_accounts(**kwargs), page, page_size)
@requires("wallet")
async def jsonrpc_account_balance(self, account_id=None, wallet_id=None, confirmations=0):
2017-01-03 20:13:01 +01:00
"""
Return the balance of an account
2016-03-24 03:27:48 +01:00
2017-05-28 22:01:53 +02:00
Usage:
account_balance [<account_id>] [<address> | --address=<address>] [--wallet_id=<wallet_id>]
[<confirmations> | --confirmations=<confirmations>]
2017-05-28 22:01:53 +02:00
Options:
--account_id=<account_id> : (str) If provided only the balance for this
account will be given. Otherwise default account.
--wallet_id=<wallet_id> : (str) balance for specific wallet
--confirmations=<confirmations> : (int) Only include transactions with this many
confirmed blocks.
2017-04-23 19:33:06 +02:00
Returns:
(decimal) amount of lbry credits in wallet
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
account = wallet.get_account_or_default(account_id)
2019-10-14 01:32:10 +02:00
balance = await account.get_detailed_balance(
2021-04-14 15:51:36 +02:00
confirmations=confirmations, read_only=True
2019-10-14 01:32:10 +02:00
)
return dict_values_to_lbc(balance)
2017-04-23 19:33:06 +02:00
@requires("wallet")
2018-10-16 21:04:20 +02:00
async def jsonrpc_account_add(
2019-09-20 15:25:50 +02:00
self, account_name, wallet_id=None, single_key=False,
seed=None, private_key=None, public_key=None):
"""
Add a previously created account from a seed, private key or public key (read-only).
Specify --single_key for single address or vanity address accounts.
Usage:
account_add (<account_name> | --account_name=<account_name>)
(--seed=<seed> | --private_key=<private_key> | --public_key=<public_key>)
2019-09-20 15:25:50 +02:00
[--single_key] [--wallet_id=<wallet_id>]
Options:
--account_name=<account_name> : (str) name of the account to add
--seed=<seed> : (str) seed to generate new account from
--private_key=<private_key> : (str) private key for new account
--public_key=<public_key> : (str) public key for new account
--single_key : (bool) create single key account, default is multi-key
2019-09-20 15:25:50 +02:00
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
2019-04-06 21:33:07 +02:00
Returns: {Account}
"""
2019-09-20 15:25:50 +02:00
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
2020-01-03 04:18:49 +01:00
account = Account.from_dict(
2019-09-20 15:25:50 +02:00
self.ledger, wallet, {
'name': account_name,
'seed': seed,
'private_key': private_key,
'public_key': public_key,
'address_generator': {
'name': SingleKey.name if single_key else HierarchicalDeterministic.name
}
}
)
2019-09-20 15:25:50 +02:00
wallet.save()
2018-08-30 06:04:25 +02:00
if self.ledger.network.is_connected:
2018-11-20 01:23:23 +01:00
await self.ledger.subscribe_account(account)
2019-04-06 21:33:07 +02:00
return account
@requires("wallet")
2019-09-20 15:25:50 +02:00
async def jsonrpc_account_create(self, account_name, single_key=False, wallet_id=None):
"""
Create a new account. Specify --single_key if you want to use
the same address for all transactions (not recommended).
Usage:
2019-09-20 15:25:50 +02:00
account_create (<account_name> | --account_name=<account_name>)
[--single_key] [--wallet_id=<wallet_id>]
Options:
--account_name=<account_name> : (str) name of the account to create
--single_key : (bool) create single key account, default is multi-key
2019-09-20 15:25:50 +02:00
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
2019-04-06 21:33:07 +02:00
Returns: {Account}
"""
2019-09-20 15:25:50 +02:00
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
2020-01-03 04:18:49 +01:00
account = Account.generate(
2019-09-20 15:25:50 +02:00
self.ledger, wallet, account_name, {
'name': SingleKey.name if single_key else HierarchicalDeterministic.name
}
)
2019-09-20 15:25:50 +02:00
wallet.save()
2018-08-30 06:04:25 +02:00
if self.ledger.network.is_connected:
2018-11-20 01:23:23 +01:00
await self.ledger.subscribe_account(account)
2019-04-06 21:33:07 +02:00
return account
@requires("wallet")
def jsonrpc_account_remove(self, account_id, wallet_id=None):
"""
Remove an existing account.
Usage:
account_remove (<account_id> | --account_id=<account_id>) [--wallet_id=<wallet_id>]
Options:
--account_id=<account_id> : (str) id of the account to remove
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
2019-04-06 21:33:07 +02:00
Returns: {Account}
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
account = wallet.get_account_or_error(account_id)
wallet.accounts.remove(account)
wallet.save()
2019-04-06 21:33:07 +02:00
return account
@requires("wallet")
def jsonrpc_account_set(
self, account_id, wallet_id=None, default=False, new_name=None,
change_gap=None, change_max_uses=None, receiving_gap=None, receiving_max_uses=None):
"""
Change various settings on an account.
Usage:
account_set (<account_id> | --account_id=<account_id>) [--wallet_id=<wallet_id>]
2018-08-30 06:04:25 +02:00
[--default] [--new_name=<new_name>]
[--change_gap=<change_gap>] [--change_max_uses=<change_max_uses>]
[--receiving_gap=<receiving_gap>] [--receiving_max_uses=<receiving_max_uses>]
Options:
--account_id=<account_id> : (str) id of the account to change
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
--default : (bool) make this account the default
2018-08-30 06:04:25 +02:00
--new_name=<new_name> : (str) new name for the account
--receiving_gap=<receiving_gap> : (int) set the gap for receiving addresses
--receiving_max_uses=<receiving_max_uses> : (int) set the maximum number of times to
use a receiving address
--change_gap=<change_gap> : (int) set the gap for change addresses
--change_max_uses=<change_max_uses> : (int) set the maximum number of times to
use a change address
2019-04-06 21:33:07 +02:00
Returns: {Account}
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
account = wallet.get_account_or_error(account_id)
change_made = False
if account.receiving.name == HierarchicalDeterministic.name:
address_changes = {
'change': {'gap': change_gap, 'maximum_uses_per_address': change_max_uses},
'receiving': {'gap': receiving_gap, 'maximum_uses_per_address': receiving_max_uses},
}
2021-08-21 04:36:35 +02:00
for chain_name, changes in address_changes.items():
chain = getattr(account, chain_name)
2021-08-21 04:36:35 +02:00
for attr, value in changes.items():
if value is not None:
setattr(chain, attr, value)
change_made = True
2018-08-30 06:04:25 +02:00
if new_name is not None:
account.name = new_name
change_made = True
if default and wallet.default_account != account:
wallet.accounts.remove(account)
wallet.accounts.insert(0, account)
change_made = True
if change_made:
2020-07-20 20:28:19 +02:00
account.modified_on = int(time.time())
wallet.save()
2019-04-06 21:33:07 +02:00
return account
@requires("wallet")
2019-09-20 22:00:00 +02:00
def jsonrpc_account_max_address_gap(self, account_id, wallet_id=None):
"""
Finds ranges of consecutive addresses that are unused and returns the length
of the longest such range: for change and receiving address chains. This is
useful to figure out ideal values to set for 'receiving_gap' and 'change_gap'
account settings.
Usage:
account_max_address_gap (<account_id> | --account_id=<account_id>)
2019-09-20 22:00:00 +02:00
[--wallet_id=<wallet_id>]
Options:
--account_id=<account_id> : (str) account for which to get max gaps
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
Returns:
(map) maximum gap for change and receiving addresses
"""
2019-09-20 22:00:00 +02:00
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
return wallet.get_account_or_error(account_id).get_max_gap()
@requires("wallet")
def jsonrpc_account_fund(self, to_account=None, from_account=None, amount='0.0',
everything=False, outputs=1, broadcast=False, wallet_id=None):
"""
Transfer some amount (or --everything) to an account from another
account (can be the same account). Amounts are interpreted as LBC.
You can also spread the transfer across a number of --outputs (cannot
be used together with --everything).
Usage:
account_fund [<to_account> | --to_account=<to_account>]
[<from_account> | --from_account=<from_account>]
(<amount> | --amount=<amount> | --everything)
[<outputs> | --outputs=<outputs>] [--wallet_id=<wallet_id>]
[--broadcast]
Options:
--to_account=<to_account> : (str) send to this account
--from_account=<from_account> : (str) spend from this account
--amount=<amount> : (decimal) the amount to transfer lbc
--everything : (bool) transfer everything (excluding claims), default: false.
--outputs=<outputs> : (int) split payment across many outputs, default: 1.
--wallet_id=<wallet_id> : (str) limit operation to specific wallet.
--broadcast : (bool) actually broadcast the transaction, default: false.
2019-04-06 21:33:07 +02:00
Returns: {Transaction}
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
to_account = wallet.get_account_or_default(to_account)
from_account = wallet.get_account_or_default(from_account)
amount = self.get_dewies_or_error('amount', amount) if amount else None
if not isinstance(outputs, int):
# TODO: use error from lbry.error
raise ValueError("--outputs must be an integer.")
if everything and outputs > 1:
# TODO: use error from lbry.error
raise ValueError("Using --everything along with --outputs is not supported.")
return from_account.fund(
to_account=to_account, amount=amount, everything=everything,
outputs=outputs, broadcast=broadcast
)
@requires("wallet")
async def jsonrpc_account_deposit(
self, txid, nout, redeem_script, private_key,
to_account=None, wallet_id=None, preview=False, blocking=False
):
"""
Spend a time locked transaction into your account.
Usage:
account_deposit <txid> <nout> <redeem_script> <private_key>
[<to_account> | --to_account=<to_account>]
[--wallet_id=<wallet_id>] [--preview] [--blocking]
Options:
--txid=<txid> : (str) id of the transaction
--nout=<nout> : (int) output number in the transaction
--redeem_script=<redeem_script> : (str) redeem script for output
--private_key=<private_key> : (str) private key to sign transaction
--to_account=<to_account> : (str) deposit to this account
--wallet_id=<wallet_id> : (str) limit operation to specific wallet.
--preview : (bool) do not broadcast the transaction
--blocking : (bool) wait until tx has synced
Returns: {Transaction}
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
account = wallet.get_account_or_default(to_account)
other_tx = await self.wallet_manager.get_transaction(txid)
tx = await Transaction.spend_time_lock(
other_tx.outputs[nout], unhexlify(redeem_script), account
)
pk = PrivateKey.from_bytes(
account.ledger, Base58.decode_check(private_key)[1:-1]
)
2022-03-16 05:06:31 +01:00
await tx.sign([account], {pk.address: pk})
if not preview:
await self.broadcast_or_release(tx, blocking)
self.component_manager.loop.create_task(self.analytics_manager.send_credits_sent())
else:
await self.ledger.release_tx(tx)
return tx
@requires(WALLET_COMPONENT)
def jsonrpc_account_send(self, amount, addresses, account_id=None, wallet_id=None, preview=False, blocking=False):
    """
    Send the same number of credits to multiple addresses from a specific account (or default account).

    Usage:
        account_send <amount> <addresses>... [--account_id=<account_id>] [--wallet_id=<wallet_id>] [--preview]
                     [--blocking]

    Options:
        --account_id=<account_id>  : (str) account to fund the transaction
        --wallet_id=<wallet_id>    : (str) restrict operation to specific wallet
        --preview                  : (bool) do not broadcast the transaction
        --blocking                 : (bool) wait until tx has synced

    Returns: {Transaction}
    """
    # Thin wrapper around wallet_send, pinned to a single account when one is
    # given (both for funding and for receiving change).
    funding_ids = [account_id] if account_id else []
    return self.jsonrpc_wallet_send(
        amount=amount, addresses=addresses, wallet_id=wallet_id,
        change_account_id=account_id, funding_account_ids=funding_ids,
        preview=preview, blocking=blocking
    )
2019-03-12 14:42:28 +01:00
SYNC_DOC = """
Wallet synchronization.
"""
@requires("wallet")
2019-09-20 15:25:50 +02:00
def jsonrpc_sync_hash(self, wallet_id=None):
2019-02-11 00:36:21 +01:00
"""
2019-03-11 14:52:35 +01:00
Deterministic hash of the wallet.
2019-02-11 00:36:21 +01:00
Usage:
2019-09-20 15:25:50 +02:00
sync_hash [<wallet_id> | --wallet_id=<wallet_id>]
2019-02-11 00:36:21 +01:00
Options:
2019-09-20 15:25:50 +02:00
--wallet_id=<wallet_id> : (str) wallet for which to generate hash
2019-02-11 00:36:21 +01:00
Returns:
2019-03-11 14:52:35 +01:00
(str) sha256 hash of wallet
"""
2019-09-20 15:25:50 +02:00
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
return hexlify(wallet.hash).decode()
2019-02-11 00:36:21 +01:00
@requires("wallet")
async def jsonrpc_sync_apply(self, password, data=None, wallet_id=None, blocking=False):
2019-02-11 00:36:21 +01:00
"""
Apply incoming synchronization data, if provided, and return a sync hash and update wallet data.
Wallet must be unlocked to perform this operation.
If "encrypt-on-disk" preference is True and supplied password is different from local password,
or there is no local password (because local wallet was not encrypted), then the supplied password
will be used for local encryption (overwriting previous local encryption password).
2019-03-11 14:52:35 +01:00
Usage:
sync_apply <password> [--data=<data>] [--wallet_id=<wallet_id>] [--blocking]
2019-03-11 14:52:35 +01:00
Options:
--password=<password> : (str) password to decrypt incoming and encrypt outgoing data
--data=<data> : (str) incoming sync data, if any
2019-09-20 15:25:50 +02:00
--wallet_id=<wallet_id> : (str) wallet being sync'ed
--blocking : (bool) wait until any new accounts have sync'ed
2019-03-11 14:52:35 +01:00
Returns:
(map) sync hash and data
"""
2019-09-20 15:25:50 +02:00
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
wallet_changed = False
2019-03-11 14:52:35 +01:00
if data is not None:
added_accounts = wallet.merge(self.wallet_manager, password, data)
if added_accounts and self.ledger.network.is_connected:
if blocking:
await asyncio.wait([
a.ledger.subscribe_account(a) for a in added_accounts
])
2019-03-11 14:52:35 +01:00
else:
for new_account in added_accounts:
2019-03-11 14:52:35 +01:00
asyncio.create_task(self.ledger.subscribe_account(new_account))
wallet_changed = True
if wallet.preferences.get(ENCRYPT_ON_DISK, False) and password != wallet.encryption_password:
wallet.encryption_password = password
wallet_changed = True
if wallet_changed:
2019-09-20 15:25:50 +02:00
wallet.save()
encrypted = wallet.pack(password)
2019-02-11 00:36:21 +01:00
return {
2019-09-20 15:25:50 +02:00
'hash': self.jsonrpc_sync_hash(wallet_id),
2019-03-11 14:52:35 +01:00
'data': encrypted.decode()
2019-02-11 00:36:21 +01:00
}
2019-01-21 21:55:50 +01:00
ADDRESS_DOC = """
2019-03-26 03:06:36 +01:00
List, generate and verify addresses.
2019-01-21 21:55:50 +01:00
"""
@requires(WALLET_COMPONENT)
async def jsonrpc_address_is_mine(self, address, account_id=None, wallet_id=None):
    """
    Checks if an address is associated with the current wallet.

    Usage:
        address_is_mine (<address> | --address=<address>)
                        [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]

    Options:
        --address=<address>       : (str) address to check
        --account_id=<account_id> : (str) id of the account to use
        --wallet_id=<wallet_id>   : (str) restrict operation to specific wallet

    Returns:
        (bool) true, if address is associated with current wallet
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    account = wallet.get_account_or_default(account_id)
    # A non-None row means the address belongs to the selected account.
    match = await self.ledger.db.get_address(read_only=True, address=address, accounts=[account])
    return match is not None
@requires(WALLET_COMPONENT)
def jsonrpc_address_list(self, address=None, account_id=None, wallet_id=None, page=None, page_size=None):
    """
    List account addresses or details of single address.

    Usage:
        address_list [--address=<address>] [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                     [--page=<page>] [--page_size=<page_size>]

    Options:
        --address=<address>        : (str) just show details for single address
        --account_id=<account_id>  : (str) id of the account to use
        --wallet_id=<wallet_id>    : (str) restrict operation to specific wallet
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination

    Returns: {Paginated[Address]}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    # Restrict the columns fetched to what the Address JSON encoder needs.
    constraints = {
        'cols': ('address', 'account', 'used_times', 'pubkey', 'chain_code', 'n', 'depth')
    }
    if address:
        constraints['address'] = address
    # Either a single (validated) account or every account in the wallet.
    constraints['accounts'] = (
        [wallet.get_account_or_error(account_id)] if account_id else wallet.accounts
    )
    return paginate_rows(
        self.ledger.get_addresses,
        self.ledger.get_address_count,
        page, page_size, read_only=True, **constraints
    )
@requires(WALLET_COMPONENT)
def jsonrpc_address_unused(self, account_id=None, wallet_id=None):
    """
    Return an address containing no balance, will create
    a new address if there is none.

    Usage:
        address_unused [--account_id=<account_id>] [--wallet_id=<wallet_id>]

    Options:
        --account_id=<account_id> : (str) id of the account to use
        --wallet_id=<wallet_id>   : (str) restrict operation to specific wallet

    Returns: {Address}
    """
    # Delegates to the receiving chain, which reuses an unused address or
    # derives a fresh one on demand.
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    account = wallet.get_account_or_default(account_id)
    return account.receiving.get_or_create_usable_address()
2019-01-21 21:55:50 +01:00
FILE_DOC = """
File management.
"""
@requires(FILE_MANAGER_COMPONENT)
async def jsonrpc_file_list(self, sort=None, reverse=False, comparison=None, wallet_id=None, page=None,
                            page_size=None, **kwargs):
    """
    List files limited by optional filters

    Usage:
        file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
                  [--rowid=<rowid>] [--added_on=<added_on>] [--claim_id=<claim_id>]
                  [--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>]
                  [--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>]
                  [--claim_name=<claim_name>] [--blobs_in_stream=<blobs_in_stream>]
                  [--download_path=<download_path>] [--blobs_remaining=<blobs_remaining>]
                  [--uploading_to_reflector=<uploading_to_reflector>] [--is_fully_reflected=<is_fully_reflected>]
                  [--status=<status>] [--completed=<completed>] [--sort=<sort_by>] [--comparison=<comparison>]
                  [--full_status=<full_status>] [--reverse] [--page=<page>] [--page_size=<page_size>]
                  [--wallet_id=<wallet_id>]

    Options:
        --sd_hash=<sd_hash>                    : (str) get file with matching sd hash
        --file_name=<file_name>                : (str) get file with matching file name in the
                                                 downloads folder
        --stream_hash=<stream_hash>            : (str) get file with matching stream hash
        --rowid=<rowid>                        : (int) get file with matching row id
        --added_on=<added_on>                  : (int) get file with matching time of insertion
        --claim_id=<claim_id>                  : (str) get file with matching claim id(s)
        --outpoint=<outpoint>                  : (str) get file with matching claim outpoint(s)
        --txid=<txid>                          : (str) get file with matching claim txid
        --nout=<nout>                          : (int) get file with matching claim nout
        --channel_claim_id=<channel_claim_id>  : (str) get file with matching channel claim id(s)
        --channel_name=<channel_name>          : (str) get file with matching channel name
        --claim_name=<claim_name>              : (str) get file with matching claim name
        --blobs_in_stream=<blobs_in_stream>    : (int) get file with matching blobs in stream
        --download_path=<download_path>        : (str) get file with matching download path
        --uploading_to_reflector=<uploading_to_reflector> : (bool) get files currently uploading to reflector
        --is_fully_reflected=<is_fully_reflected>         : (bool) get files that have been uploaded to reflector
        --status=<status>                      : (str) match by status, ( running | finished | stopped )
        --completed=<completed>                : (bool) match only completed
        --blobs_remaining=<blobs_remaining>    : (int) amount of remaining blobs to download
        --sort=<sort_by>                       : (str) field to sort by (one of the above filter fields)
        --comparison=<comparison>              : (str) logical comparison, (eq | ne | g | ge | l | le | in)
        --page=<page>                          : (int) page to return during paginating
        --page_size=<page_size>                : (int) number of items on page during pagination
        --wallet_id=<wallet_id>                : (str) add purchase receipts from this wallet

    Returns: {Paginated[File]}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    # Default ordering/comparison when the caller supplies none.
    sort = sort or 'rowid'
    comparison = comparison or 'eq'
    # Filter and order in the file manager, then slice out the requested page.
    paginated = paginate_list(
        self.file_manager.get_filtered(sort, reverse, comparison, **kwargs), page, page_size
    )
    if paginated['items']:
        # Attach purchase receipts, fetched in one batch for just the claims
        # on this page rather than per-file.
        receipts = {
            txo.purchased_claim_id: txo for txo in
            await self.ledger.db.get_purchases(
                accounts=wallet.accounts,
                purchased_claim_id__in=[s.claim_id for s in paginated['items']]
            )
        }
        for stream in paginated['items']:
            stream.purchase_receipt = receipts.get(stream.claim_id)
    return paginated
@requires(FILE_MANAGER_COMPONENT)
async def jsonrpc_file_set_status(self, status, **kwargs):
    """
    Start or stop downloading a file

    Usage:
        file_set_status (<status> | --status=<status>) [--sd_hash=<sd_hash>]
                        [--file_name=<file_name>] [--stream_hash=<stream_hash>] [--rowid=<rowid>]

    Options:
        --status=<status>            : (str) one of "start" or "stop"
        --sd_hash=<sd_hash>          : (str) set status of file with matching sd hash
        --file_name=<file_name>      : (str) set status of file with matching file name in the
                                       downloads folder
        --stream_hash=<stream_hash>  : (str) set status of file with matching stream hash
        --rowid=<rowid>              : (int) set status of file with matching row id

    Returns:
        (str) Confirmation message
    """

    if status not in ['start', 'stop']:
        # TODO: use error from lbry.error
        raise Exception('Status must be "start" or "stop".')

    streams = self.file_manager.get_filtered(**kwargs)
    if not streams:
        # TODO: use error from lbry.error
        raise Exception(f'Unable to find a file for {kwargs}')
    # When filters match several files, only the first one is acted on.
    stream = streams[0]
    if status == 'start' and not stream.running:
        # Torrent entries carry a bt_infohash attribute and do not use the DHT
        # downloader; only wire up the DHT node for regular streams when the
        # dht component is enabled.
        if not hasattr(stream, 'bt_infohash') and 'dht' not in self.conf.components_to_skip:
            stream.downloader.node = self.dht_node
        await stream.save_file()
        msg = "Resumed download"
    elif status == 'stop' and stream.running:
        await stream.stop()
        msg = "Stopped download"
    else:
        # Requested state already matches the stream's current state.
        msg = (
            "File was already being downloaded" if status == 'start'
            else "File was already stopped"
        )
    return msg
@requires(FILE_MANAGER_COMPONENT)
async def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs):
    """
    Delete a LBRY file

    Usage:
        file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                    [--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--txid=<txid>]
                    [--nout=<nout>] [--claim_name=<claim_name>] [--channel_claim_id=<channel_claim_id>]
                    [--channel_name=<channel_name>]

    Options:
        --delete_from_download_dir             : (bool) delete file from download directory,
                                                 instead of just deleting blobs
        --delete_all                           : (bool) if there are multiple matching files,
                                                 allow the deletion of multiple files.
                                                 Otherwise do not delete anything.
        --sd_hash=<sd_hash>                    : (str) delete by file sd hash
        --file_name=<file_name>                : (str) delete by file name in downloads folder
        --stream_hash=<stream_hash>            : (str) delete by file stream hash
        --rowid=<rowid>                        : (int) delete by file row id
        --claim_id=<claim_id>                  : (str) delete by file claim id
        --txid=<txid>                          : (str) delete by file claim txid
        --nout=<nout>                          : (int) delete by file claim nout
        --claim_name=<claim_name>              : (str) delete by file claim name
        --channel_claim_id=<channel_claim_id>  : (str) delete by file channel claim id
        --channel_name=<channel_name>          : (str) delete by file channel claim name

    Returns:
        (bool) true if deletion was successful
    """
    streams = self.file_manager.get_filtered(**kwargs)

    # Guard clauses: nothing matched, or multiple matches without --delete_all.
    if not streams:
        log.warning("There is no file to delete")
        return False
    if len(streams) > 1 and not delete_all:
        log.warning("There are %i files to delete, use narrower filters to select one",
                    len(streams))
        return False
    if len(streams) > 1:
        log.warning("Deleting %i files",
                    len(streams))

    for stream in streams:
        # Capture the name before deletion invalidates the stream object.
        message = f"Deleted file {stream.file_name}"
        await self.file_manager.delete(stream, delete_file=delete_from_download_dir)
        log.info(message)
    return True
@requires(FILE_MANAGER_COMPONENT)
async def jsonrpc_file_save(self, file_name=None, download_directory=None, **kwargs):
    """
    Start saving a file to disk.

    Usage:
        file_save [--file_name=<file_name>] [--download_directory=<download_directory>] [--sd_hash=<sd_hash>]
                  [--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--txid=<txid>]
                  [--nout=<nout>] [--claim_name=<claim_name>] [--channel_claim_id=<channel_claim_id>]
                  [--channel_name=<channel_name>]

    Options:
        --file_name=<file_name>                      : (str) file name to save to
        --download_directory=<download_directory>    : (str) directory to save into
        --sd_hash=<sd_hash>                          : (str) save file with matching sd hash
        --stream_hash=<stream_hash>                  : (str) save file with matching stream hash
        --rowid=<rowid>                              : (int) save file with matching row id
        --claim_id=<claim_id>                        : (str) save file with matching claim id
        --txid=<txid>                                : (str) save file with matching claim txid
        --nout=<nout>                                : (int) save file with matching claim nout
        --claim_name=<claim_name>                    : (str) save file with matching claim name
        --channel_claim_id=<channel_claim_id>        : (str) save file with matching channel claim id
        --channel_name=<channel_name>                : (str) save file with matching channel claim name

    Returns: {File}
    """
    streams = self.file_manager.get_filtered(**kwargs)
    # Refuse ambiguous filters: exactly one file must match.
    if len(streams) > 1:
        log.warning("There are %i matching files, use narrower filters to select one", len(streams))
        return False
    if not streams:
        log.warning("There is no file to save")
        return False
    stream = streams[0]
    # Torrent entries (with bt_infohash) don't use the DHT downloader; only
    # wire up the DHT node for regular streams when the dht component is enabled.
    if not hasattr(stream, 'bt_infohash') and 'dht' not in self.conf.components_to_skip:
        stream.downloader.node = self.dht_node
    await stream.save_file(file_name, download_directory)
    return stream
PURCHASE_DOC = """
List and make purchases of claims.
"""
@requires(WALLET_COMPONENT)
def jsonrpc_purchase_list(
        self, claim_id=None, resolve=False, account_id=None, wallet_id=None, page=None, page_size=None):
    """
    List my claim purchases.

    Usage:
        purchase_list [<claim_id> | --claim_id=<claim_id>] [--resolve]
                      [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                      [--page=<page>] [--page_size=<page_size>]

    Options:
        --claim_id=<claim_id>      : (str) purchases for specific claim
        --resolve                  : (str) include resolved claim information
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination

    Returns: {Paginated[Output]}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    # One validated account, or the whole wallet when no account is given.
    accounts = [wallet.get_account_or_error(account_id)] if account_id else wallet.accounts
    constraints = {
        "wallet": wallet,
        "accounts": accounts,
        "resolve": resolve,
    }
    if claim_id:
        constraints["purchased_claim_id"] = claim_id
    return paginate_rows(
        self.ledger.get_purchases,
        self.ledger.get_purchase_count,
        page, page_size, **constraints
    )
@requires(WALLET_COMPONENT)
async def jsonrpc_purchase_create(
        self, claim_id=None, url=None, wallet_id=None, funding_account_ids=None,
        allow_duplicate_purchase=False, override_max_key_fee=False, preview=False, blocking=False):
    """
    Purchase a claim.

    Usage:
        purchase_create (--claim_id=<claim_id> | --url=<url>) [--wallet_id=<wallet_id>]
                        [--funding_account_ids=<funding_account_ids>...]
                        [--allow_duplicate_purchase] [--override_max_key_fee] [--preview] [--blocking]

    Options:
        --claim_id=<claim_id>          : (str) claim id of claim to purchase
        --url=<url>                    : (str) lookup claim to purchase by url
        --wallet_id=<wallet_id>        : (str) restrict operation to specific wallet
        --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
        --allow_duplicate_purchase     : (bool) allow purchasing claim_id you already own
        --override_max_key_fee         : (bool) ignore max key fee for this purchase
        --preview                      : (bool) do not broadcast the transaction
        --blocking                     : (bool) wait until transaction is in mempool

    Returns: {Transaction}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
    accounts = wallet.get_accounts_or_all(funding_account_ids)
    txo = None
    # Locate the claim either directly by id or by resolving a URL; receipts
    # are fetched alongside so duplicate purchases can be detected below.
    if claim_id:
        txo = await self.ledger.get_claim_by_claim_id(claim_id, accounts, include_purchase_receipt=True)
        if not isinstance(txo, Output) or not txo.is_claim:
            # TODO: use error from lbry.error
            raise Exception(f"Could not find claim with claim_id '{claim_id}'.")
    elif url:
        txo = (await self.ledger.resolve(accounts, [url], include_purchase_receipt=True))[url]
        if not isinstance(txo, Output) or not txo.is_claim:
            # TODO: use error from lbry.error
            raise Exception(f"Could not find claim with url '{url}'.")
    else:
        # TODO: use error from lbry.error
        raise Exception("Missing argument claim_id or url.")
    # Refuse re-purchase unless explicitly allowed.
    if not allow_duplicate_purchase and txo.purchase_receipt:
        raise AlreadyPurchasedError(claim_id)
    claim = txo.claim
    # Only fee-bearing streams can be purchased.
    if not claim.is_stream or not claim.stream.has_fee:
        # TODO: use error from lbry.error
        raise Exception(f"Claim '{claim_id}' does not have a purchase price.")
    tx = await self.wallet_manager.create_purchase_transaction(
        accounts, txo, self.exchange_rate_manager, override_max_key_fee
    )
    if not preview:
        await self.broadcast_or_release(tx, blocking)
    else:
        # Preview only: return reserved outputs to the pool instead of broadcasting.
        await self.ledger.release_tx(tx)
    return tx
CLAIM_DOC = """
List and search all types of claims.
2019-01-23 19:00:58 +01:00
"""
2019-01-22 23:44:17 +01:00
@requires(WALLET_COMPONENT)
def jsonrpc_claim_list(self, claim_type=None, **kwargs):
    """
    List my stream and channel claims.

    Usage:
        claim_list [--claim_type=<claim_type>...] [--claim_id=<claim_id>...] [--name=<name>...] [--is_spent]
                   [--channel_id=<channel_id>...] [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                   [--has_source | --has_no_source] [--page=<page>] [--page_size=<page_size>]
                   [--resolve] [--order_by=<order_by>] [--no_totals] [--include_received_tips]

    Options:
        --claim_type=<claim_type>  : (str or list) claim type: channel, stream, repost, collection
        --claim_id=<claim_id>      : (str or list) claim id
        --channel_id=<channel_id>  : (str or list) streams in this channel
        --name=<name>              : (str or list) claim name
        --is_spent                 : (bool) shows previous claim updates and abandons
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
        --has_source               : (bool) list claims containing a source field
        --has_no_source            : (bool) list claims not containing a source field
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination
        --resolve                  : (bool) resolves each claim to provide additional metadata
        --order_by=<order_by>      : (str) field to order by: 'name', 'height', 'amount'
        --no_totals                : (bool) do not calculate the total number of pages and items in result set
                                            (significant performance boost)
        --include_received_tips    : (bool) calculate the amount of tips received for claim outputs

    Returns: {Paginated[Output]}
    """
    # Default to every claim type, then delegate filtering/pagination to txo_list.
    kwargs['type'] = claim_type if claim_type else CLAIM_TYPE_NAMES
    if not kwargs.get('is_spent'):
        # Unless spent claims were requested, restrict results to live outputs.
        kwargs['is_not_spent'] = True
    return self.jsonrpc_txo_list(**kwargs)
2020-10-13 21:51:59 +02:00
async def jsonrpc_support_sum(self, claim_id, new_sdk_server, include_channel_content=False, **kwargs):
    """
    List total staked supports for a claim, grouped by the channel that signed the support.

    If claim_id is a channel claim, you can use --include_channel_content to also include supports for
    content claims in the channel.

    !!!! NOTE: PAGINATION DOES NOT DO ANYTHING AT THE MOMENT !!!!!

    Usage:
        support_sum <claim_id> <new_sdk_server>
                    [--include_channel_content]
                    [--page=<page>] [--page_size=<page_size>]

    Options:
        --claim_id=<claim_id>             : (str)  claim id
        --new_sdk_server=<new_sdk_server> : (str)  URL of the new SDK server (EXPERIMENTAL)
        --include_channel_content         : (bool) if claim_id is for a channel, include supports for claims in
                                                   that channel
        --page=<page>                     : (int)  page to return during paginating
        --page_size=<page_size>           : (int)  number of items on page during pagination

    Returns: {Paginated[Dict]}
    """
    # Normalize pagination: negative values are folded positive, page size capped at 50.
    page_num = abs(kwargs.pop('page', 1))
    page_size = min(abs(kwargs.pop('page_size', DEFAULT_PAGE_SIZE)), 50)
    kwargs['offset'] = page_size * (page_num - 1)
    kwargs['limit'] = page_size
    support_sums = await self.ledger.sum_supports(
        new_sdk_server, claim_id=claim_id, include_channel_content=include_channel_content, **kwargs
    )
    return {
        "items": support_sums,
        "page": page_num,
        "page_size": page_size
    }
@requires(WALLET_COMPONENT)
async def jsonrpc_claim_search(self, **kwargs):
"""
2019-03-26 03:06:36 +01:00
Search for stream and channel claims on the blockchain.
2017-05-28 22:01:53 +02:00
Arguments marked with "supports equality constraints" allow prepending the
2019-05-19 02:09:23 +02:00
value with an equality constraint such as '>', '>=', '<' and '<='
eg. --height=">400000" would limit results to only claims above 400k block height.
They also support multiple constraints passed as a list of the args described above.
eg. --release_time=[">1000000", "<2000000"]
Usage:
2019-11-14 20:31:49 +01:00
claim_search [<name> | --name=<name>] [--text=<text>] [--txid=<txid>] [--nout=<nout>]
[--claim_id=<claim_id> | --claim_ids=<claim_ids>...]
2019-09-12 20:44:09 +02:00
[--channel=<channel> |
[[--channel_ids=<channel_ids>...] [--not_channel_ids=<not_channel_ids>...]]]
[--has_channel_signature] [--valid_channel_signature | --invalid_channel_signature]
[--limit_claims_per_channel=<limit_claims_per_channel>]
2019-06-03 22:37:21 +02:00
[--is_controlling] [--release_time=<release_time>] [--public_key_id=<public_key_id>]
2019-05-28 04:20:21 +02:00
[--timestamp=<timestamp>] [--creation_timestamp=<creation_timestamp>]
[--height=<height>] [--creation_height=<creation_height>]
[--activation_height=<activation_height>] [--expiration_height=<expiration_height>]
[--amount=<amount>] [--effective_amount=<effective_amount>]
[--support_amount=<support_amount>] [--trending_group=<trending_group>]
[--trending_mixed=<trending_mixed>] [--trending_local=<trending_local>]
[--trending_global=<trending_global] [--trending_score=<trending_score]
[--reposted_claim_id=<reposted_claim_id>] [--reposted=<reposted>]
[--claim_type=<claim_type>] [--stream_types=<stream_types>...] [--media_types=<media_types>...]
[--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>]
2020-01-29 21:41:40 +01:00
[--duration=<duration>]
[--any_tags=<any_tags>...] [--all_tags=<all_tags>...] [--not_tags=<not_tags>...]
2019-05-06 04:52:42 +02:00
[--any_languages=<any_languages>...] [--all_languages=<all_languages>...]
[--not_languages=<not_languages>...]
2019-05-06 04:52:42 +02:00
[--any_locations=<any_locations>...] [--all_locations=<all_locations>...]
[--not_locations=<not_locations>...]
2021-02-17 21:30:30 +01:00
[--order_by=<order_by>...] [--no_totals] [--page=<page>] [--page_size=<page_size>]
[--wallet_id=<wallet_id>] [--include_purchase_receipt] [--include_is_my_output]
2021-12-21 19:04:21 +01:00
[--remove_duplicates] [--has_source | --has_no_source] [--sd_hash=<sd_hash>]
2020-08-04 18:11:02 +02:00
[--new_sdk_server=<new_sdk_server>]
Options:
2021-09-09 09:09:46 +02:00
--name=<name> : (str) claim name (normalized)
2019-11-14 20:31:49 +01:00
--text=<text> : (str) full text search
2019-05-28 04:20:21 +02:00
--claim_id=<claim_id> : (str) full or partial claim id
--claim_ids=<claim_ids> : (list) list of full claim ids
2019-05-28 04:20:21 +02:00
--txid=<txid> : (str) transaction id
--nout=<nout> : (str) position in the transaction
--channel=<channel> : (str) claims signed by this channel (argument is
a URL which automatically gets resolved),
see --channel_ids if you need to filter by
multiple channels at the same time,
includes claims with invalid signatures,
use in conjunction with --valid_channel_signature
2019-07-15 18:44:06 +02:00
--channel_ids=<channel_ids> : (list) claims signed by any of these channels
2019-05-28 04:20:21 +02:00
(arguments must be claim ids of the channels),
includes claims with invalid signatures,
implies --has_channel_signature,
use in conjunction with --valid_channel_signature
2019-07-15 18:44:06 +02:00
--not_channel_ids=<not_channel_ids>: (list) exclude claims signed by any of these channels
(arguments must be claim ids of the channels)
--has_channel_signature : (bool) claims with a channel signature (valid or invalid)
--valid_channel_signature : (bool) claims with a valid channel signature or no signature,
use in conjunction with --has_channel_signature to
only get claims with valid signatures
--invalid_channel_signature : (bool) claims with invalid channel signature or no signature,
use in conjunction with --has_channel_signature to
only get claims with invalid signatures
2020-08-19 18:08:41 +02:00
--limit_claims_per_channel=<limit_claims_per_channel>: (int) only return up to the specified
number of claims per channel
--is_controlling : (bool) winning claims of their respective name
2019-06-04 02:56:19 +02:00
--public_key_id=<public_key_id> : (str) only return channels having this public key id, this is
2019-06-03 22:37:21 +02:00
the same key as used in the wallet file to map
channel certificate private keys: {'public_key_id': 'private key'}
2019-05-28 04:20:21 +02:00
--height=<height> : (int) last updated block height (supports equality constraints)
--timestamp=<timestamp> : (int) last updated timestamp (supports equality constraints)
--creation_height=<creation_height> : (int) created at block height (supports equality constraints)
--creation_timestamp=<creation_timestamp>: (int) created at timestamp (supports equality constraints)
--activation_height=<activation_height> : (int) height at which claim starts competing for name
(supports equality constraints)
--expiration_height=<expiration_height> : (int) height at which claim will expire
(supports equality constraints)
--release_time=<release_time> : (int) limit to claims self-described as having been
2019-05-18 05:54:03 +02:00
released to the public on or after this UTC
timestamp, when claim does not provide
a release time the publish time is used instead
(supports equality constraints)
--amount=<amount> : (int) limit by claim value (supports equality constraints)
2019-10-02 20:04:30 +02:00
--support_amount=<support_amount>: (int) limit by supports and tips received (supports
equality constraints)
2019-05-22 15:23:31 +02:00
--effective_amount=<effective_amount>: (int) limit by total value (initial claim value plus
all tips and supports received), this amount is
blank until claim has reached activation height
(supports equality constraints)
--trending_score=<trending_score>: (int) limit by trending score (supports equality constraints)
--trending_group=<trending_group>: (int) DEPRECATED - instead please use trending_score
--trending_mixed=<trending_mixed>: (int) DEPRECATED - instead please use trending_score
--trending_local=<trending_local>: (int) DEPRECATED - instead please use trending_score
--trending_global=<trending_global>: (int) DEPRECATED - instead please use trending_score
--reposted_claim_id=<reposted_claim_id>: (str) all reposts of the specified original claim id
--reposted=<reposted> : (int) claims reposted this many times (supports
equality constraints)
--claim_type=<claim_type> : (str) filter by 'channel', 'stream', 'repost' or 'collection'
--stream_types=<stream_types> : (list) filter by 'video', 'image', 'document', etc
--media_types=<media_types> : (list) filter by 'video/mp4', 'image/png', etc
--fee_currency=<fee_currency> : (string) specify fee currency: LBC, BTC, USD
--fee_amount=<fee_amount> : (decimal) content download fee (supports equality constraints)
2020-01-29 21:41:40 +01:00
--duration=<duration> : (int) duration of video or audio in seconds
(supports equality constraints)
2019-05-06 04:52:42 +02:00
--any_tags=<any_tags> : (list) find claims containing any of the tags
--all_tags=<all_tags> : (list) find claims containing every tag
--not_tags=<not_tags> : (list) find claims not containing any of these tags
2019-05-06 04:52:42 +02:00
--any_languages=<any_languages> : (list) find claims containing any of the languages
--all_languages=<all_languages> : (list) find claims containing every language
--not_languages=<not_languages> : (list) find claims not containing any of these languages
2019-05-06 04:52:42 +02:00
--any_locations=<any_locations> : (list) find claims containing any of the locations
--all_locations=<all_locations> : (list) find claims containing every location
--not_locations=<not_locations> : (list) find claims not containing any of these locations
2019-05-06 04:52:42 +02:00
--page=<page> : (int) page to return during paginating
--page_size=<page_size> : (int) number of items on page during pagination
2019-07-15 18:44:06 +02:00
--order_by=<order_by> : (list) field to order by, default is descending order, to do an
2019-05-28 04:20:21 +02:00
ascending order prepend ^ to the field name, eg. '^amount'
available fields: 'name', 'height', 'release_time',
'publish_time', 'amount', 'effective_amount',
'support_amount', 'trending_group', 'trending_mixed',
'trending_local', 'trending_global', 'activation_height'
--no_totals : (bool) do not calculate the total number of pages and items in result set
(significant performance boost)
2021-02-17 00:38:07 +01:00
--wallet_id=<wallet_id> : (str) wallet to check for claim purchase receipts
2020-03-22 06:13:26 +01:00
--include_purchase_receipt : (bool) lookup and include a receipt if this wallet
has purchased the claim
--include_is_my_output : (bool) lookup and include a boolean indicating
if claim being resolved is yours
--remove_duplicates : (bool) removes duplicated content from search by picking either the
original claim or the oldest matching repost
--has_source : (bool) find claims containing a source field
2021-12-21 19:04:21 +01:00
--sd_hash=<sd_hash> : (str) find claims where the source stream descriptor hash matches
2021-12-08 08:03:55 +01:00
(partially or completely) the given hexadecimal string
--has_no_source : (bool) find claims not containing a source field
2020-08-04 20:12:40 +02:00
--new_sdk_server=<new_sdk_server> : (str) URL of the new SDK server (EXPERIMENTAL)
2019-04-06 21:33:07 +02:00
Returns: {Paginated[Output]}
2019-03-26 03:06:36 +01:00
"""
2021-09-01 08:37:57 +02:00
if self.ledger.config.get('use_go_hub'):
host = self.ledger.network.client.server[0]
2021-06-23 23:28:32 +02:00
port = "50051"
kwargs['new_sdk_server'] = f"{host}:{port}"
if kwargs.get("channel"):
channel = kwargs.pop("channel")
channel_obj = (await self.jsonrpc_resolve(channel))[channel]
if isinstance(channel_obj, dict):
# This happens when the channel doesn't exist
kwargs["channel_id"] = ""
else:
kwargs["channel_id"] = channel_obj.claim_id
kwargs = fix_kwargs_for_hub(**kwargs)
else:
# Don't do this if using the hub server, it screws everything up
if "claim_ids" in kwargs and not kwargs["claim_ids"]:
kwargs.pop("claim_ids")
if {'claim_id', 'claim_ids'}.issubset(kwargs):
raise ConflictingInputValueError('claim_id', 'claim_ids')
if kwargs.pop('valid_channel_signature', False):
kwargs['signature_valid'] = 1
if kwargs.pop('invalid_channel_signature', False):
kwargs['signature_valid'] = 0
if 'has_no_source' in kwargs:
kwargs['has_source'] = not kwargs.pop('has_no_source')
if 'order_by' in kwargs: # TODO: remove this after removing support for old trending args from the api
value = kwargs.pop('order_by')
value = value if isinstance(value, list) else [value]
new_value = []
for new_v in value:
migrated = new_v if new_v not in (
'trending_mixed', 'trending_local', 'trending_global', 'trending_group'
) else 'trending_score'
if migrated not in new_value:
new_value.append(migrated)
kwargs['order_by'] = new_value
page_num, page_size = abs(kwargs.pop('page', 1)), min(abs(kwargs.pop('page_size', DEFAULT_PAGE_SIZE)), 50)
wallet = self.wallet_manager.get_wallet_or_default(kwargs.pop('wallet_id', None))
kwargs.update({'offset': page_size * (page_num - 1), 'limit': page_size})
2021-08-11 05:45:10 +02:00
txos, blocked, _, total = await self.ledger.claim_search(wallet.accounts, **kwargs)
result = {
"items": txos,
"blocked": blocked,
"page": page_num,
"page_size": page_size
}
if not kwargs.pop('no_totals', False):
result['total_pages'] = int((total + (page_size - 1)) / page_size)
result['total_items'] = total
return result
2019-01-21 21:55:50 +01:00
CHANNEL_DOC = """
Create, update, abandon and list your channel claims.
2019-01-21 21:55:50 +01:00
"""
2019-03-25 03:59:55 +01:00
    # Legacy stub: `channel_new` was replaced by `channel_create`; the
    # @deprecated decorator routes callers to the new command.
    @deprecated('channel_create')
    def jsonrpc_channel_new(self):
        """ deprecated """
@requires(WALLET_COMPONENT)
2019-03-24 21:55:04 +01:00
async def jsonrpc_channel_create(
self, name, bid, allow_duplicate_name=False, account_id=None, wallet_id=None,
claim_address=None, funding_account_ids=None, preview=False, blocking=False, **kwargs):
2017-04-07 02:45:05 +02:00
"""
2019-03-26 03:06:36 +01:00
Create a new channel by generating a channel private key and establishing an '@' prefixed claim.
2017-04-07 02:45:05 +02:00
2017-05-28 22:01:53 +02:00
Usage:
2019-03-25 03:59:55 +01:00
channel_create (<name> | --name=<name>) (<bid> | --bid=<bid>)
[--allow_duplicate_name=<allow_duplicate_name>]
[--title=<title>] [--description=<description>] [--email=<email>]
[--website_url=<website_url>] [--featured=<featured>...]
[--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
[--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
[--account_id=<account_id>] [--wallet_id=<wallet_id>]
[--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
2019-06-10 07:26:35 +02:00
[--preview] [--blocking]
2017-04-07 02:45:05 +02:00
Options:
2019-03-24 21:55:04 +01:00
--name=<name> : (str) name of the channel prefixed with '@'
--bid=<bid> : (decimal) amount to back the claim
2019-03-24 21:55:04 +01:00
--allow_duplicate_name=<allow_duplicate_name> : (bool) create new channel even if one already exists with
given name. default: false.
--title=<title> : (str) title of the publication
--description=<description> : (str) description of the publication
--email=<email> : (str) email of channel owner
--website_url=<website_url> : (str) website url
--featured=<featured> : (list) claim_ids of featured content in channel
--tags=<tags> : (list) content tags
--languages=<languages> : (list) languages used by the channel,
using RFC 5646 format, eg:
for English `--languages=en`
for Spanish (Spain) `--languages=es-ES`
for Spanish (Mexican) `--languages=es-MX`
for Chinese (Simplified) `--languages=zh-Hans`
for Chinese (Traditional) `--languages=zh-Hant`
--locations=<locations> : (list) locations of the channel, consisting of 2 letter
`country` code and a `state`, `city` and a postal
`code` along with a `latitude` and `longitude`.
for JSON RPC: pass a dictionary with aforementioned
attributes as keys, eg:
...
"locations": [{'country': 'US', 'state': 'NH'}]
...
for command line: pass a colon delimited list
with values in the following order:
"COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
making sure to include colon for blank values, for
example to provide only the city:
... --locations="::Manchester"
with all values set:
... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
optionally, you can just pass the "LATITUDE:LONGITUDE":
... --locations="42.990605:-71.460989"
finally, you can also pass JSON string of dictionary
on the command line as you would via JSON RPC
... --locations="{'country': 'US', 'state': 'NH'}"
2019-03-24 21:55:04 +01:00
--thumbnail_url=<thumbnail_url>: (str) thumbnail url
--cover_url=<cover_url> : (str) url of cover image
--account_id=<account_id> : (str) account to use for holding the transaction
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
--funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
2019-03-24 21:55:04 +01:00
--claim_address=<claim_address>: (str) address where the channel is sent to, if not specified
it will be determined automatically from the account
--preview : (bool) do not broadcast the transaction
2019-06-10 07:26:35 +02:00
--blocking : (bool) wait until transaction is in mempool
2019-04-06 21:33:07 +02:00
Returns: {Transaction}
2019-03-24 21:55:04 +01:00
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
account = wallet.get_account_or_default(account_id)
funding_accounts = wallet.get_accounts_or_all(funding_account_ids)
2019-03-26 03:06:36 +01:00
self.valid_channel_name_or_error(name)
2019-03-24 21:55:04 +01:00
amount = self.get_dewies_or_error('bid', bid, positive_value=True)
claim_address = await self.get_receiving_address(claim_address, account)
existing_channels = await self.ledger.get_channels(accounts=wallet.accounts, claim_name=name)
2019-03-24 21:55:04 +01:00
if len(existing_channels) > 0:
if not allow_duplicate_name:
# TODO: use error from lbry.error
2019-03-24 21:55:04 +01:00
raise Exception(
f"You already have a channel under the name '{name}'. "
f"Use --allow-duplicate-name flag to override."
)
2019-03-24 21:55:04 +01:00
claim = Claim()
claim.channel.update(**kwargs)
tx = await Transaction.claim_create(
name, claim, amount, claim_address, funding_accounts, funding_accounts[0]
2019-03-24 21:55:04 +01:00
)
txo = tx.outputs[0]
2021-10-13 16:56:10 +02:00
txo.set_channel_private_key(
await funding_accounts[0].generate_channel_private_key()
)
2019-03-24 21:55:04 +01:00
await tx.sign(funding_accounts)
2019-03-24 21:55:04 +01:00
if not preview:
wallet.save()
2019-08-12 08:52:37 +02:00
await self.broadcast_or_release(tx, blocking)
self.component_manager.loop.create_task(self.storage.save_claims([self._old_get_temp_claim_info(
2019-03-24 21:55:04 +01:00
tx, txo, claim_address, claim, name, dewies_to_lbc(amount)
)]))
2020-02-04 02:15:10 +01:00
self.component_manager.loop.create_task(self.analytics_manager.send_new_channel())
2019-03-24 21:55:04 +01:00
else:
await account.ledger.release_tx(tx)
return tx
    @requires(WALLET_COMPONENT)
    async def jsonrpc_channel_update(
            self, claim_id, bid=None, account_id=None, wallet_id=None, claim_address=None,
            funding_account_ids=None, new_signing_key=False, preview=False,
            blocking=False, replace=False, **kwargs):
        """
        Update an existing channel claim.

        Usage:
            channel_update (<claim_id> | --claim_id=<claim_id>) [<bid> | --bid=<bid>]
                           [--title=<title>] [--description=<description>] [--email=<email>]
                           [--website_url=<website_url>]
                           [--featured=<featured>...] [--clear_featured]
                           [--tags=<tags>...] [--clear_tags]
                           [--languages=<languages>...] [--clear_languages]
                           [--locations=<locations>...] [--clear_locations]
                           [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
                           [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                           [--claim_address=<claim_address>] [--new_signing_key]
                           [--funding_account_ids=<funding_account_ids>...]
                           [--preview] [--blocking] [--replace]

        Options:
            --claim_id=<claim_id>          : (str) claim_id of the channel to update
            --bid=<bid>                    : (decimal) amount to back the claim
            --title=<title>                : (str) title of the publication
            --description=<description>    : (str) description of the publication
            --email=<email>                : (str) email of channel owner
            --website_url=<website_url>    : (str) website url
            --featured=<featured>          : (list) claim_ids of featured content in channel
            --clear_featured               : (bool) clear existing featured content (prior to adding new ones)
            --tags=<tags>                  : (list) add content tags
            --clear_tags                   : (bool) clear existing tags (prior to adding new ones)
            --languages=<languages>        : (list) languages used by the channel,
                                                    using RFC 5646 format, eg:
                                                    for English `--languages=en`
                                                    for Spanish (Spain) `--languages=es-ES`
                                                    for Spanish (Mexican) `--languages=es-MX`
                                                    for Chinese (Simplified) `--languages=zh-Hans`
                                                    for Chinese (Traditional) `--languages=zh-Hant`
            --clear_languages              : (bool) clear existing languages (prior to adding new ones)
            --locations=<locations>        : (list) locations of the channel, consisting of 2 letter
                                                    `country` code and a `state`, `city` and a postal
                                                    `code` along with a `latitude` and `longitude`.
                                                    for JSON RPC: pass a dictionary with aforementioned
                                                    attributes as keys, eg:
                                                        ...
                                                        "locations": [{'country': 'US', 'state': 'NH'}]
                                                        ...
                                                    for command line: pass a colon delimited list
                                                    with values in the following order:
                                                        "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
                                                    making sure to include colon for blank values, for
                                                    example to provide only the city:
                                                        ... --locations="::Manchester"
                                                    with all values set:
                                                        ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
                                                    optionally, you can just pass the "LATITUDE:LONGITUDE":
                                                        ... --locations="42.990605:-71.460989"
                                                    finally, you can also pass JSON string of dictionary
                                                    on the command line as you would via JSON RPC
                                                        ... --locations="{'country': 'US', 'state': 'NH'}"
            --clear_locations              : (bool) clear existing locations (prior to adding new ones)
            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --cover_url=<cover_url>        : (str) url of cover image
            --account_id=<account_id>      : (str) account in which to look for channel (default: all)
            --wallet_id=<wallet_id>        : (str) restrict operation to specific wallet
            --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
            --claim_address=<claim_address>: (str) address where the channel is sent
            --new_signing_key              : (bool) generate a new signing key, will invalidate all previous publishes
            --preview                      : (bool) do not broadcast the transaction
            --blocking                     : (bool) wait until transaction is in mempool
            --replace                      : (bool) instead of modifying specific values on
                                                    the channel, this will clear all existing values
                                                    and only save passed in values, useful for form
                                                    submissions where all values are always set

        Returns: {Transaction}
        """
        wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
        assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
        funding_accounts = wallet.get_accounts_or_all(funding_account_ids)
        # With an explicit account_id the channel lookup is restricted to that
        # account; otherwise all wallet accounts are searched.
        if account_id:
            account = wallet.get_account_or_error(account_id)
            accounts = [account]
        else:
            account = wallet.default_account
            accounts = wallet.accounts

        existing_channels = await self.ledger.get_claims(
            wallet=wallet, accounts=accounts, claim_id=claim_id
        )
        # Exactly one match is required: zero means not found, and claim_id
        # lookups should never return more than one claim.
        if len(existing_channels) != 1:
            account_ids = ', '.join(f"'{account.id}'" for account in accounts)
            # TODO: use error from lbry.error
            raise Exception(
                f"Can't find the channel '{claim_id}' in account(s) {account_ids}."
            )
        old_txo = existing_channels[0]
        if not old_txo.claim.is_channel:
            # TODO: use error from lbry.error
            raise Exception(
                f"A claim with id '{claim_id}' was found but it is not a channel."
            )

        # Omitted bid keeps the claim's existing amount.
        if bid is not None:
            amount = self.get_dewies_or_error('bid', bid, positive_value=True)
        else:
            amount = old_txo.amount

        if claim_address is not None:
            self.valid_address_or_error(claim_address)
        else:
            claim_address = old_txo.get_address(account.ledger)

        # --replace starts from an empty Claim (keeping only the public key so
        # signatures stay valid); otherwise edits are layered on a copy of the
        # existing claim metadata.
        if replace:
            claim = Claim()
            claim.channel.public_key_bytes = old_txo.claim.channel.public_key_bytes
        else:
            claim = Claim.from_bytes(old_txo.claim.to_bytes())
        claim.channel.update(**kwargs)
        tx = await Transaction.claim_update(
            old_txo, claim, amount, claim_address, funding_accounts, funding_accounts[0]
        )
        new_txo = tx.outputs[0]

        # Either mint a fresh signing key (invalidates earlier publishes, per the
        # option docs) or carry the existing private key over to the new output.
        if new_signing_key:
            new_txo.set_channel_private_key(
                await funding_accounts[0].generate_channel_private_key()
            )
        else:
            new_txo.private_key = old_txo.private_key

        # Regenerate the output script after the claim/key changes above.
        new_txo.script.generate()

        await tx.sign(funding_accounts)

        if not preview:
            wallet.save()
            await self.broadcast_or_release(tx, blocking)
            self.component_manager.loop.create_task(self.storage.save_claims([self._old_get_temp_claim_info(
                tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
            )]))
            self.component_manager.loop.create_task(self.analytics_manager.send_new_channel())
        else:
            # Preview mode: release the reserved outputs so funds stay spendable.
            await account.ledger.release_tx(tx)
        return tx
2017-04-07 02:45:05 +02:00
2020-12-24 05:55:58 +01:00
    @requires(WALLET_COMPONENT)
    async def jsonrpc_channel_sign(
            self, channel_name=None, channel_id=None, hexdata=None, channel_account_id=None, wallet_id=None):
        """
        Signs data using the specified channel signing key.

        Usage:
            channel_sign [<channel_name> | --channel_name=<channel_name>]
                         [<channel_id> | --channel_id=<channel_id>] [<hexdata> | --hexdata=<hexdata>]
                         [--channel_account_id=<channel_account_id>...] [--wallet_id=<wallet_id>]

        Options:
            --channel_name=<channel_name>            : (str) name of channel used to sign (or use channel id)
            --channel_id=<channel_id>                : (str) claim id of channel used to sign (or use channel name)
            --hexdata=<hexdata>                      : (str) data to sign, encoded as hexadecimal
            --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                                       for channel certificates, defaults to all accounts.
            --wallet_id=<wallet_id>                  : (str) restrict operation to specific wallet

        Returns:
            (dict) Signature if successfully made, (None) or an error otherwise
            {
                "signature":    (str) The signature of the data,
                "signing_ts":   (str) The timestamp used to sign the data,
            }
        """
        wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
        assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
        # for_signing=True ensures the resolved channel has its private key available.
        signing_channel = await self.get_channel_or_error(
            wallet, channel_account_id, channel_id, channel_name, for_signing=True
        )
        # The current UNIX time is mixed into the signature and returned alongside
        # it, so verifiers must use the same signing_ts to validate.
        timestamp = str(int(time.time()))
        signature = signing_channel.sign_data(unhexlify(str(hexdata)), timestamp)
        return {
            'signature': signature,
            'signing_ts': timestamp
        }
2020-12-24 05:55:58 +01:00
@requires(WALLET_COMPONENT)
async def jsonrpc_channel_abandon(
self, claim_id=None, txid=None, nout=None, account_id=None, wallet_id=None,
preview=False, blocking=True):
"""
Abandon one of my channel claims.
Usage:
channel_abandon [<claim_id> | --claim_id=<claim_id>]
[<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
[--account_id=<account_id>] [--wallet_id=<wallet_id>]
[--preview] [--blocking]
Options:
--claim_id=<claim_id> : (str) claim_id of the claim to abandon
--txid=<txid> : (str) txid of the claim to abandon
--nout=<nout> : (int) nout of the claim to abandon
--account_id=<account_id> : (str) id of the account to use
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
--preview : (bool) do not broadcast the transaction
--blocking : (bool) wait until abandon is in mempool
2019-04-06 21:33:07 +02:00
Returns: {Transaction}
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
if account_id:
account = wallet.get_account_or_error(account_id)
accounts = [account]
else:
account = wallet.default_account
accounts = wallet.accounts
if txid is not None and nout is not None:
claims = await self.ledger.get_claims(
wallet=wallet, accounts=accounts, **{'txo.txid': txid, 'txo.position': nout}
)
elif claim_id is not None:
claims = await self.ledger.get_claims(
wallet=wallet, accounts=accounts, claim_id=claim_id
)
else:
# TODO: use error from lbry.error
raise Exception('Must specify claim_id, or txid and nout')
if not claims:
# TODO: use error from lbry.error
raise Exception('No claim found for the specified claim_id or txid:nout')
tx = await Transaction.create(
[Input.spend(txo) for txo in claims], [], [account], account
)
if not preview:
2019-08-12 08:52:37 +02:00
await self.broadcast_or_release(tx, blocking)
2020-02-04 02:15:10 +01:00
self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('abandon'))
else:
await account.ledger.release_tx(tx)
return tx
@requires(WALLET_COMPONENT)
2020-03-07 06:34:47 +01:00
def jsonrpc_channel_list(self, *args, **kwargs):
2017-04-07 02:45:05 +02:00
"""
2019-03-26 03:06:36 +01:00
List my channel claims.
2017-04-07 02:45:05 +02:00
2017-05-28 22:01:53 +02:00
Usage:
channel_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
2020-04-02 02:44:34 +02:00
[--name=<name>...] [--claim_id=<claim_id>...] [--is_spent]
2020-03-21 23:06:05 +01:00
[--page=<page>] [--page_size=<page_size>] [--resolve] [--no_totals]
2017-05-28 22:01:53 +02:00
Options:
2020-03-07 06:34:47 +01:00
--name=<name> : (str or list) channel name
--claim_id=<claim_id> : (str or list) channel id
2020-04-02 02:44:34 +02:00
--is_spent : (bool) shows previous channel updates and abandons
--account_id=<account_id> : (str) id of the account to use
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
--page=<page> : (int) page to return during paginating
--page_size=<page_size> : (int) number of items on page during pagination
--resolve : (bool) resolves each channel to provide additional metadata
2020-03-21 23:06:05 +01:00
--no_totals : (bool) do not calculate the total number of pages and items in result set
(significant performance boost)
2019-04-06 21:33:07 +02:00
Returns: {Paginated[Output]}
2017-04-07 02:45:05 +02:00
"""
2020-03-07 06:34:47 +01:00
kwargs['type'] = 'channel'
if 'is_spent' not in kwargs or not kwargs['is_spent']:
2020-04-02 02:44:34 +02:00
kwargs['is_not_spent'] = True
2020-03-07 06:34:47 +01:00
return self.jsonrpc_txo_list(*args, **kwargs)
2017-04-07 02:45:05 +02:00
@requires(WALLET_COMPONENT)
async def jsonrpc_channel_export(self, channel_id=None, channel_name=None, account_id=None, wallet_id=None):
2017-11-22 19:46:34 +01:00
"""
2019-05-29 07:21:54 +02:00
Export channel private key.
2017-11-22 19:46:34 +01:00
Usage:
2019-05-29 07:21:54 +02:00
channel_export (<channel_id> | --channel_id=<channel_id> | --channel_name=<channel_name>)
[--account_id=<account_id>...] [--wallet_id=<wallet_id>]
2017-11-22 19:46:34 +01:00
Options:
2019-05-29 07:21:54 +02:00
--channel_id=<channel_id> : (str) claim id of channel to export
--channel_name=<channel_name> : (str) name of channel to export
--account_id=<account_id> : (str) one or more account ids for accounts
to look in for channels, defaults to
all accounts.
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
2017-11-22 19:46:34 +01:00
Returns:
2019-05-29 07:21:54 +02:00
(str) serialized channel private key
"""
wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
channel = await self.get_channel_or_error(wallet, account_id, channel_id, channel_name, for_signing=True)
2019-05-29 07:21:54 +02:00
address = channel.get_address(self.ledger)
public_key = await self.ledger.get_public_key_for_address(wallet, address)
2019-05-29 07:21:54 +02:00
if not public_key:
# TODO: use error from lbry.error
2019-05-29 07:21:54 +02:00
raise Exception("Can't find public key for address holding the channel.")
export = {
'name': channel.claim_name,
'channel_id': channel.claim_id,
'holding_address': address,
'holding_public_key': public_key.extended_key_string(),
'signing_private_key': channel.private_key.signing_key.to_pem().decode()
2019-05-29 07:21:54 +02:00
}
return base58.b58encode(json.dumps(export, separators=(',', ':')))
2017-11-22 19:46:34 +01:00
    @requires(WALLET_COMPONENT)
    async def jsonrpc_channel_import(self, channel_data, wallet_id=None):
        """
        Import serialized channel private key (to allow signing new streams to the channel)

        Usage:
            channel_import (<channel_data> | --channel_data=<channel_data>) [--wallet_id=<wallet_id>]

        Options:
            --channel_data=<channel_data> : (str) serialized channel, as exported by channel export
            --wallet_id=<wallet_id>       : (str) import into specific wallet

        Returns:
            (dict) Result dictionary
        """
        wallet = self.wallet_manager.get_wallet_or_default(wallet_id)

        # channel_data is the base58(JSON) blob produced by channel_export.
        decoded = base58.b58decode(channel_data)
        data = json.loads(decoded)
        channel_private_key = PrivateKey.from_pem(
            self.ledger, data['signing_private_key']
        )

        # check that the holding_address hasn't changed since the export was made
        holding_address = data['holding_address']
        channels, _, _, _ = await self.ledger.claim_search(
            wallet.accounts, public_key_id=channel_private_key.address
        )
        if channels and channels[0].get_address(self.ledger) != holding_address:
            # Claim has moved on-chain since export; trust the current address.
            holding_address = channels[0].get_address(self.ledger)

        account = await self.ledger.get_account_for_address(wallet, holding_address)
        if account:
            # Case 1: channel holding address is in one of the accounts we already have
            #         simply add the certificate to existing account
            pass
        else:
            # Case 2: channel holding address hasn't changed and thus is in the bundled read-only account
            #         create a single-address holding account to manage the channel
            if holding_address == data['holding_address']:
                account = Account.from_dict(self.ledger, wallet, {
                    'name': f"Holding Account For Channel {data['name']}",
                    'public_key': data['holding_public_key'],
                    'address_generator': {'name': 'single-address'}
                })
                if self.ledger.network.is_connected:
                    # Sync the new account so its history/balance are populated.
                    await self.ledger.subscribe_account(account)
                    await self.ledger._update_tasks.done.wait()
            # Case 3: the holding address has changed and we can't create or find an account for it
            else:
                # TODO: use error from lbry.error
                raise Exception(
                    "Channel owning account has changed since the channel was exported and "
                    "it is not an account to which you have access."
                )
        account.add_channel_private_key(channel_private_key)
        wallet.save()
        return f"Added channel signing key for {data['name']}."
2017-11-22 19:46:34 +01:00
STREAM_DOC = """
Create, update, abandon, list and inspect your stream claims.
2019-03-24 21:55:04 +01:00
"""
@requires(WALLET_COMPONENT, FILE_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT)
async def jsonrpc_publish(self, name, **kwargs):
"""
Create or replace a stream claim at a given name (use 'stream create/update' for more control).
Usage:
2021-03-10 22:44:37 +01:00
publish (<name> | --name=<name>) [--bid=<bid>] [--file_path=<file_path>]
[--file_name=<file_name>] [--file_hash=<file_hash>] [--validate_file] [--optimize_file]
[--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
[--title=<title>] [--description=<description>] [--author=<author>]
[--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
[--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
[--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>]
[--sd_hash=<sd_hash>] [--channel_id=<channel_id> | --channel_name=<channel_name>]
2019-03-27 21:02:17 +01:00
[--channel_account_id=<channel_account_id>...]
[--account_id=<account_id>] [--wallet_id=<wallet_id>]
[--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
2019-06-10 07:26:35 +02:00
[--preview] [--blocking]
Options:
--name=<name> : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
--bid=<bid> : (decimal) amount to back the claim
--file_path=<file_path> : (str) path to file to be associated with name.
--file_name=<file_name> : (str) name of file to be associated with stream.
--file_hash=<file_hash> : (str) hash of file to be associated with stream.
--validate_file : (bool) validate that the video container and encodings match
common web browser support or that optimization succeeds if specified.
FFmpeg is required
--optimize_file : (bool) transcode the video & audio if necessary to ensure
common web browser support. FFmpeg is required
--fee_currency=<fee_currency> : (string) specify fee currency
--fee_amount=<fee_amount> : (decimal) content download fee
--fee_address=<fee_address> : (str) address where to send fee payments, will use
value from --claim_address if not provided
--title=<title> : (str) title of the publication
--description=<description> : (str) description of the publication
--author=<author> : (str) author of the publication. The usage for this field is not
the same as for channels. The author field is used to credit an author
who is not the publisher and is not represented by the channel. For
example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
by published to a channel such as '@classics', or to no channel at all
--tags=<tags> : (list) add content tags
--languages=<languages> : (list) languages used by the channel,
using RFC 5646 format, eg:
for English `--languages=en`
for Spanish (Spain) `--languages=es-ES`
for Spanish (Mexican) `--languages=es-MX`
for Chinese (Simplified) `--languages=zh-Hans`
for Chinese (Traditional) `--languages=zh-Hant`
2019-03-30 01:30:08 +01:00
--locations=<locations> : (list) locations relevant to the stream, consisting of 2 letter
`country` code and a `state`, `city` and a postal
`code` along with a `latitude` and `longitude`.
for JSON RPC: pass a dictionary with aforementioned
attributes as keys, eg:
...
"locations": [{'country': 'US', 'state': 'NH'}]
...
for command line: pass a colon delimited list
with values in the following order:
"COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
making sure to include colon for blank values, for
example to provide only the city:
... --locations="::Manchester"
with all values set:
... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
optionally, you can just pass the "LATITUDE:LONGITUDE":
... --locations="42.990605:-71.460989"
finally, you can also pass JSON string of dictionary
on the command line as you would via JSON RPC
... --locations="{'country': 'US', 'state': 'NH'}"
--license=<license> : (str) publication license
--license_url=<license_url> : (str) publication license url
--thumbnail_url=<thumbnail_url>: (str) thumbnail url
--release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch
--width=<width> : (int) image/video width, automatically calculated from media file
--height=<height> : (int) image/video height, automatically calculated from media file
--duration=<duration> : (int) audio/video duration in seconds, automatically calculated
--sd_hash=<sd_hash> : (str) sd_hash of stream
--channel_id=<channel_id> : (str) claim id of the publisher channel
2019-03-27 21:02:17 +01:00
--channel_name=<channel_name> : (str) name of publisher channel
2019-06-03 05:09:56 +02:00
--channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
for channel certificates, defaults to all accounts.
--account_id=<account_id> : (str) account to use for holding the transaction
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
--funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
--claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
it will be determined automatically from the account
--preview : (bool) do not broadcast the transaction
2019-06-10 07:26:35 +02:00
--blocking : (bool) wait until transaction is in mempool
2019-04-06 21:33:07 +02:00
Returns: {Transaction}
"""
self.valid_stream_name_or_error(name)
wallet = self.wallet_manager.get_wallet_or_default(kwargs.get('wallet_id'))
if kwargs.get('account_id'):
accounts = [wallet.get_account_or_error(kwargs.get('account_id'))]
else:
accounts = wallet.accounts
claims = await self.ledger.get_claims(
wallet=wallet, accounts=accounts, claim_name=name
)
if len(claims) == 0:
2019-03-27 21:02:17 +01:00
if 'bid' not in kwargs:
# TODO: use error from lbry.error
2019-03-27 21:02:17 +01:00
raise Exception("'bid' is a required argument for new publishes.")
return await self.jsonrpc_stream_create(name, **kwargs)
elif len(claims) == 1:
assert claims[0].claim.is_stream, f"Claim at name '{name}' is not a stream claim."
return await self.jsonrpc_stream_update(claims[0].claim_id, replace=True, **kwargs)
# TODO: use error from lbry.error
raise Exception(
f"There are {len(claims)} claims for '{name}', please use 'stream update' command "
f"to update a specific stream claim."
)
@requires(WALLET_COMPONENT, FILE_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT)
async def jsonrpc_stream_repost(
        self, name, bid, claim_id, allow_duplicate_name=False, channel_id=None,
        channel_name=None, channel_account_id=None, account_id=None, wallet_id=None,
        claim_address=None, funding_account_ids=None, preview=False, blocking=False, **kwargs):
    """
    Creates a claim that references an existing stream by its claim id.

    Usage:
        stream_repost (<name> | --name=<name>) (<bid> | --bid=<bid>) (<claim_id> | --claim_id=<claim_id>)
                [--allow_duplicate_name=<allow_duplicate_name>]
                [--title=<title>] [--description=<description>] [--tags=<tags>...]
                [--channel_id=<channel_id> | --channel_name=<channel_name>]
                [--channel_account_id=<channel_account_id>...]
                [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
                [--preview] [--blocking]

    Options:
        --name=<name> : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
        --bid=<bid> : (decimal) amount to back the claim
        --claim_id=<claim_id> : (str) id of the claim being reposted
        --allow_duplicate_name=<allow_duplicate_name> : (bool) create new claim even if one already exists with
                given name. default: false.
        --title=<title> : (str) title of the repost
        --description=<description> : (str) description of the repost
        --tags=<tags> : (list) add repost tags
        --channel_id=<channel_id> : (str) claim id of the publisher channel
        --channel_name=<channel_name> : (str) name of the publisher channel
        --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                for channel certificates, defaults to all accounts.
        --account_id=<account_id> : (str) account to use for holding the transaction
        --wallet_id=<wallet_id> : (str) restrict operation to specific wallet
        --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
        --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                it will be determined automatically from the account
        --preview : (bool) do not broadcast the transaction
        --blocking : (bool) wait until transaction is in mempool

    Returns: {Transaction}
    """
    # Resolve wallet/account context and validate inputs before building anything.
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    self.valid_stream_name_or_error(name)
    account = wallet.get_account_or_default(account_id)
    funding_accounts = wallet.get_accounts_or_all(funding_account_ids)
    # Channel is optional for reposts; if specified it is loaded with its signing key.
    channel = await self.get_channel_or_none(wallet, channel_account_id, channel_id, channel_name, for_signing=True)
    amount = self.get_dewies_or_error('bid', bid, positive_value=True)
    claim_address = await self.get_receiving_address(claim_address, account)
    # Guard against accidentally publishing a second claim under the same name.
    claims = await account.get_claims(claim_name=name)
    if len(claims) > 0:
        if not allow_duplicate_name:
            # TODO: use error from lbry.error
            raise Exception(
                f"You already have a stream claim published under the name '{name}'. "
                f"Use --allow-duplicate-name flag to override."
            )
    # The reposted claim id must be a full (40 hex chars) claim id, not a prefix.
    if not VALID_FULL_CLAIM_ID.fullmatch(claim_id):
        # TODO: use error from lbry.error
        raise Exception('Invalid claim id. It is expected to be a 40 characters long hexadecimal string.')
    claim = Claim()
    claim.repost.update(**kwargs)
    claim.repost.reference.claim_id = claim_id
    tx = await Transaction.claim_create(
        name, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel
    )
    new_txo = tx.outputs[0]
    # Sign the claim output with the channel key first, then sign the whole
    # transaction's inputs with the funding accounts.
    if channel:
        new_txo.sign(channel)
    await tx.sign(funding_accounts)
    if not preview:
        await self.broadcast_or_release(tx, blocking)
        # Fire-and-forget analytics; not awaited so it cannot delay the response.
        self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
    else:
        # Preview mode: return the unreleased outputs to the ledger instead of broadcasting.
        await account.ledger.release_tx(tx)
    return tx
@requires(WALLET_COMPONENT, FILE_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT)
async def jsonrpc_stream_create(
        self, name, bid, file_path=None, allow_duplicate_name=False,
        channel_id=None, channel_name=None, channel_account_id=None,
        account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None,
        preview=False, blocking=False, validate_file=False, optimize_file=False, **kwargs):
    """
    Make a new stream claim and announce the associated file to lbrynet.

    Usage:
        stream_create (<name> | --name=<name>) (<bid> | --bid=<bid>) [<file_path> | --file_path=<file_path>]
                [--file_name=<file_name>] [--file_hash=<file_hash>] [--validate_file] [--optimize_file]
                [--allow_duplicate_name=<allow_duplicate_name>]
                [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
                [--title=<title>] [--description=<description>] [--author=<author>]
                [--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
                [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
                [--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>]
                [--sd_hash=<sd_hash>] [--channel_id=<channel_id> | --channel_name=<channel_name>]
                [--channel_account_id=<channel_account_id>...]
                [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
                [--preview] [--blocking]

    Options:
        --name=<name> : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
        --bid=<bid> : (decimal) amount to back the claim
        --file_path=<file_path> : (str) path to file to be associated with name.
        --file_name=<file_name> : (str) name of file to be associated with stream.
        --file_hash=<file_hash> : (str) hash of file to be associated with stream.
        --validate_file : (bool) validate that the video container and encodings match
                common web browser support or that optimization succeeds if specified.
                FFmpeg is required
        --optimize_file : (bool) transcode the video & audio if necessary to ensure
                common web browser support. FFmpeg is required
        --allow_duplicate_name=<allow_duplicate_name> : (bool) create new claim even if one already exists with
                given name. default: false.
        --fee_currency=<fee_currency> : (string) specify fee currency
        --fee_amount=<fee_amount> : (decimal) content download fee
        --fee_address=<fee_address> : (str) address where to send fee payments, will use
                value from --claim_address if not provided
        --title=<title> : (str) title of the publication
        --description=<description> : (str) description of the publication
        --author=<author> : (str) author of the publication. The usage for this field is not
                the same as for channels. The author field is used to credit an author
                who is not the publisher and is not represented by the channel. For
                example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
                by published to a channel such as '@classics', or to no channel at all
        --tags=<tags> : (list) add content tags
        --languages=<languages> : (list) languages used by the channel,
                using RFC 5646 format, eg:
                for English `--languages=en`
                for Spanish (Spain) `--languages=es-ES`
                for Spanish (Mexican) `--languages=es-MX`
                for Chinese (Simplified) `--languages=zh-Hans`
                for Chinese (Traditional) `--languages=zh-Hant`
        --locations=<locations> : (list) locations relevant to the stream, consisting of 2 letter
                `country` code and a `state`, `city` and a postal
                `code` along with a `latitude` and `longitude`.
                for JSON RPC: pass a dictionary with aforementioned
                attributes as keys, eg:
                ...
                "locations": [{'country': 'US', 'state': 'NH'}]
                ...
                for command line: pass a colon delimited list
                with values in the following order:
                "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
                making sure to include colon for blank values, for
                example to provide only the city:
                ... --locations="::Manchester"
                with all values set:
                ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
                optionally, you can just pass the "LATITUDE:LONGITUDE":
                ... --locations="42.990605:-71.460989"
                finally, you can also pass JSON string of dictionary
                on the command line as you would via JSON RPC
                ... --locations="{'country': 'US', 'state': 'NH'}"
        --license=<license> : (str) publication license
        --license_url=<license_url> : (str) publication license url
        --thumbnail_url=<thumbnail_url>: (str) thumbnail url
        --release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch
        --width=<width> : (int) image/video width, automatically calculated from media file
        --height=<height> : (int) image/video height, automatically calculated from media file
        --duration=<duration> : (int) audio/video duration in seconds, automatically calculated
        --sd_hash=<sd_hash> : (str) sd_hash of stream
        --channel_id=<channel_id> : (str) claim id of the publisher channel
        --channel_name=<channel_name> : (str) name of the publisher channel
        --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                for channel certificates, defaults to all accounts.
        --account_id=<account_id> : (str) account to use for holding the transaction
        --wallet_id=<wallet_id> : (str) restrict operation to specific wallet
        --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
        --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                it will be determined automatically from the account
        --preview : (bool) do not broadcast the transaction
        --blocking : (bool) wait until transaction is in mempool

    Returns: {Transaction}
    """
    # Resolve wallet/account context and validate inputs up front.
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
    self.valid_stream_name_or_error(name)
    account = wallet.get_account_or_default(account_id)
    funding_accounts = wallet.get_accounts_or_all(funding_account_ids)
    # Optional publisher channel, loaded with its signing key if specified.
    channel = await self.get_channel_or_none(wallet, channel_account_id, channel_id, channel_name, for_signing=True)
    amount = self.get_dewies_or_error('bid', bid, positive_value=True)
    claim_address = await self.get_receiving_address(claim_address, account)
    # Fee address falls back to the claim address when not explicitly given.
    kwargs['fee_address'] = self.get_fee_address(kwargs, claim_address)

    # Guard against accidentally publishing a second claim under the same name.
    claims = await account.get_claims(claim_name=name)
    if len(claims) > 0:
        if not allow_duplicate_name:
            # TODO: use error from lbry.error
            raise Exception(
                f"You already have a stream claim published under the name '{name}'. "
                f"Use --allow-duplicate-name flag to override."
            )

    if file_path is not None:
        # Optionally validate/transcode video files; the analyzer may return a
        # new (optimized) file path plus media metadata (width, height, etc.)
        # that is merged into the claim fields.
        file_path, spec = await self._video_file_analyzer.verify_or_repair(
            validate_file, optimize_file, file_path, ignore_non_video=True
        )
        kwargs.update(spec)

    claim = Claim()
    if file_path is not None:
        # The real sd_hash is only known after the stream is created below, so a
        # 96-char placeholder is used to size the transaction correctly.
        claim.stream.update(file_path=file_path, sd_hash='0' * 96, **kwargs)
    else:
        claim.stream.update(**kwargs)
    tx = await Transaction.claim_create(
        name, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel
    )
    new_txo = tx.outputs[0]

    file_stream = None
    if not preview and file_path is not None:
        # Create the actual stream, then patch the placeholder sd_hash and
        # regenerate the output script so the broadcast claim is correct.
        file_stream = await self.file_manager.create_stream(file_path)
        claim.stream.source.sd_hash = file_stream.sd_hash
        new_txo.script.generate()
    # Channel signature must be applied before the funding inputs are signed.
    if channel:
        new_txo.sign(channel)
    await tx.sign(funding_accounts)

    if not preview:
        await self.broadcast_or_release(tx, blocking)

        async def save_claims():
            # Persist claim metadata, and link the stream to the claim when a
            # file was published; scheduled as a background task.
            await self.storage.save_claims([self._old_get_temp_claim_info(
                tx, new_txo, claim_address, claim, name, dewies_to_lbc(amount)
            )])
            if file_path is not None:
                await self.storage.save_content_claim(file_stream.stream_hash, new_txo.id)

        self.component_manager.loop.create_task(save_claims())
        self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
    else:
        # Preview mode: return the unreleased outputs to the ledger instead of broadcasting.
        await account.ledger.release_tx(tx)
    return tx
@requires(WALLET_COMPONENT, FILE_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT)
async def jsonrpc_stream_update(
        self, claim_id, bid=None, file_path=None,
        channel_id=None, channel_name=None, channel_account_id=None, clear_channel=False,
        account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None,
        preview=False, blocking=False, replace=False, validate_file=False, optimize_file=False, **kwargs):
    """
    Update an existing stream claim and if a new file is provided announce it to lbrynet.

    Usage:
        stream_update (<claim_id> | --claim_id=<claim_id>) [--bid=<bid>] [--file_path=<file_path>]
                [--validate_file] [--optimize_file]
                [--file_name=<file_name>] [--file_size=<file_size>] [--file_hash=<file_hash>]
                [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>]
                [--fee_address=<fee_address>] [--clear_fee]
                [--title=<title>] [--description=<description>] [--author=<author>]
                [--tags=<tags>...] [--clear_tags]
                [--languages=<languages>...] [--clear_languages]
                [--locations=<locations>...] [--clear_locations]
                [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
                [--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>]
                [--sd_hash=<sd_hash>] [--channel_id=<channel_id> | --channel_name=<channel_name> | --clear_channel]
                [--channel_account_id=<channel_account_id>...]
                [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
                [--preview] [--blocking] [--replace]

    Options:
        --claim_id=<claim_id> : (str) id of the stream claim to update
        --bid=<bid> : (decimal) amount to back the claim
        --file_path=<file_path> : (str) path to file to be associated with name.
        --validate_file : (bool) validate that the video container and encodings match
                common web browser support or that optimization succeeds if specified.
                FFmpeg is required and file_path must be specified.
        --optimize_file : (bool) transcode the video & audio if necessary to ensure common
                web browser support. FFmpeg is required and file_path must be specified.
        --file_name=<file_name> : (str) override file name, defaults to name from file_path.
        --file_size=<file_size> : (str) override file size, otherwise automatically computed.
        --file_hash=<file_hash> : (str) override file hash, otherwise automatically computed.
        --fee_currency=<fee_currency> : (string) specify fee currency
        --fee_amount=<fee_amount> : (decimal) content download fee
        --fee_address=<fee_address> : (str) address where to send fee payments, will use
                value from --claim_address if not provided
        --clear_fee : (bool) clear previously set fee
        --title=<title> : (str) title of the publication
        --description=<description> : (str) description of the publication
        --author=<author> : (str) author of the publication. The usage for this field is not
                the same as for channels. The author field is used to credit an author
                who is not the publisher and is not represented by the channel. For
                example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
                by published to a channel such as '@classics', or to no channel at all
        --tags=<tags> : (list) add content tags
        --clear_tags : (bool) clear existing tags (prior to adding new ones)
        --languages=<languages> : (list) languages used by the channel,
                using RFC 5646 format, eg:
                for English `--languages=en`
                for Spanish (Spain) `--languages=es-ES`
                for Spanish (Mexican) `--languages=es-MX`
                for Chinese (Simplified) `--languages=zh-Hans`
                for Chinese (Traditional) `--languages=zh-Hant`
        --clear_languages : (bool) clear existing languages (prior to adding new ones)
        --locations=<locations> : (list) locations relevant to the stream, consisting of 2 letter
                `country` code and a `state`, `city` and a postal
                `code` along with a `latitude` and `longitude`.
                for JSON RPC: pass a dictionary with aforementioned
                attributes as keys, eg:
                ...
                "locations": [{'country': 'US', 'state': 'NH'}]
                ...
                for command line: pass a colon delimited list
                with values in the following order:
                "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
                making sure to include colon for blank values, for
                example to provide only the city:
                ... --locations="::Manchester"
                with all values set:
                ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
                optionally, you can just pass the "LATITUDE:LONGITUDE":
                ... --locations="42.990605:-71.460989"
                finally, you can also pass JSON string of dictionary
                on the command line as you would via JSON RPC
                ... --locations="{'country': 'US', 'state': 'NH'}"
        --clear_locations : (bool) clear existing locations (prior to adding new ones)
        --license=<license> : (str) publication license
        --license_url=<license_url> : (str) publication license url
        --thumbnail_url=<thumbnail_url>: (str) thumbnail url
        --release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch
        --width=<width> : (int) image/video width, automatically calculated from media file
        --height=<height> : (int) image/video height, automatically calculated from media file
        --duration=<duration> : (int) audio/video duration in seconds, automatically calculated
        --sd_hash=<sd_hash> : (str) sd_hash of stream
        --channel_id=<channel_id> : (str) claim id of the publisher channel
        --channel_name=<channel_name> : (str) name of the publisher channel
        --clear_channel : (bool) remove channel signature
        --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                for channel certificates, defaults to all accounts.
        --account_id=<account_id> : (str) account in which to look for stream (default: all)
        --wallet_id=<wallet_id> : (str) restrict operation to specific wallet
        --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
        --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                it will be determined automatically from the account
        --preview : (bool) do not broadcast the transaction
        --blocking : (bool) wait until transaction is in mempool
        --replace : (bool) instead of modifying specific values on
                the stream, this will clear all existing values
                and only save passed in values, useful for form
                submissions where all values are always set

    Returns: {Transaction}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
    funding_accounts = wallet.get_accounts_or_all(funding_account_ids)
    # With an explicit account the search is restricted to it; otherwise all
    # accounts are searched and the default account anchors address fallbacks.
    if account_id:
        account = wallet.get_account_or_error(account_id)
        accounts = [account]
    else:
        account = wallet.default_account
        accounts = wallet.accounts
    existing_claims = await self.ledger.get_claims(
        wallet=wallet, accounts=accounts, claim_id=claim_id
    )
    # Exactly one matching claim must be owned by the selected account(s).
    if len(existing_claims) != 1:
        account_ids = ', '.join(f"'{account.id}'" for account in accounts)
        raise InputValueError(
            f"Can't find the stream '{claim_id}' in account(s) {account_ids}."
        )
    old_txo = existing_claims[0]
    if not old_txo.claim.is_stream and not old_txo.claim.is_repost:
        # in principle it should work with any type of claim, but its safer to
        # limit it to ones we know won't be broken. in the future we can expand
        # this if we have a test case for e.g. channel or support claims
        raise InputValueError(
            f"A claim with id '{claim_id}' was found but it is not a stream or repost claim."
        )
    # Bid and claim address default to the existing claim's values when omitted.
    if bid is not None:
        amount = self.get_dewies_or_error('bid', bid, positive_value=True)
    else:
        amount = old_txo.amount
    if claim_address is not None:
        self.valid_address_or_error(claim_address)
    else:
        claim_address = old_txo.get_address(account.ledger)
    # Channel resolution: an explicit channel wins; otherwise keep the existing
    # signature (unless --clear_channel or --replace was requested).
    channel = None
    if not clear_channel and (channel_id or channel_name):
        channel = await self.get_channel_or_error(
            wallet, channel_account_id, channel_id, channel_name, for_signing=True)
    elif old_txo.claim.is_signed and not clear_channel and not replace:
        channel = old_txo.channel

    fee_address = self.get_fee_address(kwargs, claim_address)
    if fee_address:
        kwargs['fee_address'] = fee_address

    # May transcode/validate the file and merge detected media metadata
    # (width, height, duration, ...) into the claim fields.
    file_path, spec = await self._video_file_analyzer.verify_or_repair(
        validate_file, optimize_file, file_path, ignore_non_video=True
    )
    kwargs.update(spec)

    if replace:
        # Start from a blank claim, preserving only the source and the typed
        # stream sub-message (e.g. video/audio/image) from the old claim.
        claim = Claim()
        if old_txo.claim.is_stream:
            if old_txo.claim.stream.has_source:
                claim.stream.message.source.CopyFrom(
                    old_txo.claim.stream.message.source
                )
            stream_type = old_txo.claim.stream.stream_type
            if stream_type:
                old_stream_type = getattr(old_txo.claim.stream.message, stream_type)
                new_stream_type = getattr(claim.stream.message, stream_type)
                new_stream_type.CopyFrom(old_stream_type)
    else:
        # Default: clone the existing claim and apply incremental updates.
        claim = Claim.from_bytes(old_txo.claim.to_bytes())
    if old_txo.claim.is_stream:
        claim.stream.update(file_path=file_path, **kwargs)
    elif old_txo.claim.is_repost:
        claim.repost.update(**kwargs)
    if clear_channel:
        claim.clear_signature()
    tx = await Transaction.claim_update(
        old_txo, claim, amount, claim_address, funding_accounts, funding_accounts[0],
        channel if not clear_channel else None
    )
    new_txo = tx.outputs[0]

    stream_hash = None
    if not preview and old_txo.claim.is_stream:
        # Find any locally managed file for the previous version of this stream.
        old_stream = self.file_manager.get_filtered(sd_hash=old_txo.claim.stream.source.sd_hash)
        old_stream = old_stream[0] if old_stream else None
        if file_path is not None:
            # Replacing the content: drop the old stream entry (keeping the file
            # on disk), create the new stream and patch the claim's sd_hash,
            # then regenerate the output script to match.
            if old_stream:
                await self.file_manager.delete(old_stream, delete_file=False)
            file_stream = await self.file_manager.create_stream(file_path)
            new_txo.claim.stream.source.sd_hash = file_stream.sd_hash
            new_txo.script.generate()
            stream_hash = file_stream.stream_hash
        elif old_stream:
            stream_hash = old_stream.stream_hash
    # Channel signature must be applied before the funding inputs are signed.
    if channel:
        new_txo.sign(channel)
    await tx.sign(funding_accounts)

    if not preview:
        await self.broadcast_or_release(tx, blocking)

        async def save_claims():
            # Persist claim metadata and (when known) the stream/claim link;
            # scheduled as a background task.
            await self.storage.save_claims([self._old_get_temp_claim_info(
                tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
            )])
            if stream_hash:
                await self.storage.save_content_claim(stream_hash, new_txo.id)

        self.component_manager.loop.create_task(save_claims())
        self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
    else:
        # Preview mode: return the unreleased outputs to the ledger instead of broadcasting.
        await account.ledger.release_tx(tx)
    return tx
@requires(WALLET_COMPONENT)
async def jsonrpc_stream_abandon(
        self, claim_id=None, txid=None, nout=None, account_id=None, wallet_id=None,
        preview=False, blocking=False):
    """
    Abandon one of my stream claims.

    Usage:
        stream_abandon [<claim_id> | --claim_id=<claim_id>]
                [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                [--preview] [--blocking]

    Options:
        --claim_id=<claim_id> : (str) claim_id of the claim to abandon
        --txid=<txid> : (str) txid of the claim to abandon
        --nout=<nout> : (int) nout of the claim to abandon
        --account_id=<account_id> : (str) id of the account to use
        --wallet_id=<wallet_id> : (str) restrict operation to specific wallet
        --preview : (bool) do not broadcast the transaction
        --blocking : (bool) wait until abandon is in mempool

    Returns: {Transaction}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
    # Restrict the search to one account when requested, otherwise use them all.
    if account_id:
        account = wallet.get_account_or_error(account_id)
        accounts = [account]
    else:
        account, accounts = wallet.default_account, wallet.accounts
    # The claim may be identified either by its exact outpoint (txid:nout)
    # or by its claim id.
    if txid is not None and nout is not None:
        constraints = {'txo.txid': txid, 'txo.position': nout}
    elif claim_id is not None:
        constraints = {'claim_id': claim_id}
    else:
        # TODO: use error from lbry.error
        raise Exception('Must specify claim_id, or txid and nout')
    claims = await self.ledger.get_claims(wallet=wallet, accounts=accounts, **constraints)
    if not claims:
        # TODO: use error from lbry.error
        raise Exception('No claim found for the specified claim_id or txid:nout')
    # Abandoning means spending the claim outputs back into the account.
    spends = [Input.spend(txo) for txo in claims]
    tx = await Transaction.create(spends, [], accounts, account)
    if preview:
        # Preview mode: return the unreleased outputs to the ledger.
        await self.ledger.release_tx(tx)
    else:
        await self.broadcast_or_release(tx, blocking)
        # Fire-and-forget analytics; not awaited so it cannot delay the response.
        self.component_manager.loop.create_task(
            self.analytics_manager.send_claim_action('abandon')
        )
    return tx
@requires(WALLET_COMPONENT)
def jsonrpc_stream_list(self, *args, **kwargs):
    """
    List my stream claims.

    Usage:
        stream_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
                [--name=<name>...] [--claim_id=<claim_id>...] [--is_spent]
                [--page=<page>] [--page_size=<page_size>] [--resolve] [--no_totals]

    Options:
        --name=<name> : (str or list) stream name
        --claim_id=<claim_id> : (str or list) stream id
        --is_spent : (bool) shows previous stream updates and abandons
        --account_id=<account_id> : (str) id of the account to query
        --wallet_id=<wallet_id> : (str) restrict results to specific wallet
        --page=<page> : (int) page to return during paginating
        --page_size=<page_size> : (int) number of items on page during pagination
        --resolve : (bool) resolves each stream to provide additional metadata
        --no_totals : (bool) do not calculate the total number of pages and items in result set
                (significant performance boost)

    Returns: {Paginated[Output]}
    """
    # Thin wrapper over txo_list: force the type filter to streams and, unless
    # the caller explicitly asked about spent outputs, hide them.
    filters = dict(kwargs, type='stream')
    if 'is_spent' not in filters:
        filters['is_not_spent'] = True
    return self.jsonrpc_txo_list(*args, **filters)
2019-03-24 21:55:04 +01:00
@requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
DHT_COMPONENT, DATABASE_COMPONENT)
def jsonrpc_stream_cost_estimate(self, uri):
2019-03-24 21:55:04 +01:00
"""
Get estimated cost for a lbry stream
2019-03-24 21:55:04 +01:00
Usage:
stream_cost_estimate (<uri> | --uri=<uri>)
2019-03-24 21:55:04 +01:00
Options:
--uri=<uri> : (str) uri to use
2019-03-24 21:55:04 +01:00
2019-03-25 03:20:17 +01:00
Returns:
(float) Estimated cost in lbry credits, returns None if uri is not
resolvable
2019-03-25 03:20:17 +01:00
"""
return self.get_est_cost_from_uri(uri)
2019-03-24 21:55:04 +01:00
2019-11-13 23:50:35 +01:00
# Section header string used by the generated API documentation for the
# jsonrpc_collection_* command group that follows.
COLLECTION_DOC = """
Create, update, list, resolve, and abandon collections.
"""
@requires(WALLET_COMPONENT)
async def jsonrpc_collection_create(
        self, name, bid, claims, allow_duplicate_name=False,
        channel_id=None, channel_name=None, channel_account_id=None,
        account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None,
        preview=False, blocking=False, **kwargs):
    """
    Create a new collection.

    Usage:
        collection_create (<name> | --name=<name>) (<bid> | --bid=<bid>)
                          (--claims=<claims>...)
                          [--allow_duplicate_name]
                          [--title=<title>] [--description=<description>]
                          [--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
                          [--thumbnail_url=<thumbnail_url>]
                          [--channel_id=<channel_id> | --channel_name=<channel_name>]
                          [--channel_account_id=<channel_account_id>...]
                          [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                          [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
                          [--preview] [--blocking]

    Options:
        --name=<name>                  : (str) name of the collection
        --bid=<bid>                    : (decimal) amount to back the claim
        --claims=<claims>              : (list) claim ids to be included in the collection
        --allow_duplicate_name         : (bool) create new collection even if one already exists with
                                              given name. default: false.
        --title=<title>                : (str) title of the collection
        --description=<description>    : (str) description of the collection
        --tags=<tags>                  : (list) content tags
        --clear_languages              : (bool) clear existing languages (prior to adding new ones)
        --languages=<languages>        : (list) languages used by the collection,
                                              using RFC 5646 format, eg:
                                              for English `--languages=en`
                                              for Spanish (Spain) `--languages=es-ES`
                                              for Spanish (Mexican) `--languages=es-MX`
                                              for Chinese (Simplified) `--languages=zh-Hans`
                                              for Chinese (Traditional) `--languages=zh-Hant`
        --locations=<locations>        : (list) locations of the collection, consisting of 2 letter
                                              `country` code and a `state`, `city` and a postal
                                              `code` along with a `latitude` and `longitude`.
                                              for JSON RPC: pass a dictionary with aforementioned
                                                  attributes as keys, eg:
                                                  ...
                                                  "locations": [{'country': 'US', 'state': 'NH'}]
                                                  ...
                                              for command line: pass a colon delimited list
                                                  with values in the following order:

                                                    "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                                  making sure to include colon for blank values, for
                                                  example to provide only the city:

                                                    ... --locations="::Manchester"

                                                  with all values set:

                                                    ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                                  optionally, you can just pass the "LATITUDE:LONGITUDE":

                                                    ... --locations="42.990605:-71.460989"

                                                  finally, you can also pass JSON string of dictionary
                                                  on the command line as you would via JSON RPC

                                                    ... --locations="{'country': 'US', 'state': 'NH'}"

        --thumbnail_url=<thumbnail_url>: (str) thumbnail url
        --channel_id=<channel_id>      : (str) claim id of the publisher channel
        --channel_name=<channel_name>  : (str) name of the publisher channel
        --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                                   for channel certificates, defaults to all accounts.
        --account_id=<account_id>      : (str) account to use for holding the transaction
        --wallet_id=<wallet_id>        : (str) restrict operation to specific wallet
        --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
        --claim_address=<claim_address>: (str) address where the collection is sent to, if not specified
                                               it will be determined automatically from the account
        --preview                      : (bool) do not broadcast the transaction
        --blocking                     : (bool) wait until transaction is in mempool

    Returns: {Transaction}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    account = wallet.get_account_or_default(account_id)
    funding_accounts = wallet.get_accounts_or_all(funding_account_ids)
    self.valid_collection_name_or_error(name)
    channel = await self.get_channel_or_none(wallet, channel_account_id, channel_id, channel_name, for_signing=True)
    amount = self.get_dewies_or_error('bid', bid, positive_value=True)
    claim_address = await self.get_receiving_address(claim_address, account)
    # Refuse to publish a second collection with the same name in this wallet
    # unless the caller explicitly opted in with --allow_duplicate_name.
    existing_collections = await self.ledger.get_collections(accounts=wallet.accounts, claim_name=name)
    if len(existing_collections) > 0:
        if not allow_duplicate_name:
            # TODO: use error from lbry.error
            raise Exception(
                f"You already have a collection under the name '{name}'. "
                f"Use --allow-duplicate-name flag to override."
            )

    claim = Claim()
    # Remaining kwargs (title, description, tags, languages, locations, ...)
    # are applied verbatim to the collection metadata.
    claim.collection.update(claims=claims, **kwargs)
    tx = await Transaction.claim_create(
        name, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel
    )
    new_txo = tx.outputs[0]

    # Sign the claim with the channel key (if publishing into a channel),
    # then sign the funding inputs.
    if channel:
        new_txo.sign(channel)
    await tx.sign(funding_accounts)

    if not preview:
        await self.broadcast_or_release(tx, blocking)
        self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
    else:
        # Preview only: release the reserved outputs so they can be spent later.
        await account.ledger.release_tx(tx)

    return tx
@requires(WALLET_COMPONENT)
async def jsonrpc_collection_update(
        self, claim_id, bid=None,
        channel_id=None, channel_name=None, channel_account_id=None, clear_channel=False,
        account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None,
        preview=False, blocking=False, replace=False, **kwargs):
    """
    Update an existing collection claim.

    Usage:
        collection_update (<claim_id> | --claim_id=<claim_id>) [--bid=<bid>]
                          [--claims=<claims>...] [--clear_claims]
                          [--title=<title>] [--description=<description>]
                          [--tags=<tags>...] [--clear_tags]
                          [--languages=<languages>...] [--clear_languages]
                          [--locations=<locations>...] [--clear_locations]
                          [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
                          [--channel_id=<channel_id> | --channel_name=<channel_name>]
                          [--channel_account_id=<channel_account_id>...]
                          [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                          [--claim_address=<claim_address>]
                          [--funding_account_ids=<funding_account_ids>...]
                          [--preview] [--blocking] [--replace]

    Options:
        --claim_id=<claim_id>          : (str) claim_id of the collection to update
        --bid=<bid>                    : (decimal) amount to back the claim
        --claims=<claims>              : (list) claim ids
        --clear_claims                 : (bool) clear existing claim references (prior to adding new ones)
        --title=<title>                : (str) title of the collection
        --description=<description>    : (str) description of the collection
        --tags=<tags>                  : (list) add content tags
        --clear_tags                   : (bool) clear existing tags (prior to adding new ones)
        --languages=<languages>        : (list) languages used by the collection,
                                              using RFC 5646 format, eg:
                                              for English `--languages=en`
                                              for Spanish (Spain) `--languages=es-ES`
                                              for Spanish (Mexican) `--languages=es-MX`
                                              for Chinese (Simplified) `--languages=zh-Hans`
                                              for Chinese (Traditional) `--languages=zh-Hant`
        --clear_languages              : (bool) clear existing languages (prior to adding new ones)
        --locations=<locations>        : (list) locations of the collection, consisting of 2 letter
                                              `country` code and a `state`, `city` and a postal
                                              `code` along with a `latitude` and `longitude`.
                                              for JSON RPC: pass a dictionary with aforementioned
                                                  attributes as keys, eg:
                                                  ...
                                                  "locations": [{'country': 'US', 'state': 'NH'}]
                                                  ...
                                              for command line: pass a colon delimited list
                                                  with values in the following order:

                                                    "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                                  making sure to include colon for blank values, for
                                                  example to provide only the city:

                                                    ... --locations="::Manchester"

                                                  with all values set:

                                                    ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                                  optionally, you can just pass the "LATITUDE:LONGITUDE":

                                                    ... --locations="42.990605:-71.460989"

                                                  finally, you can also pass JSON string of dictionary
                                                  on the command line as you would via JSON RPC

                                                    ... --locations="{'country': 'US', 'state': 'NH'}"

        --clear_locations              : (bool) clear existing locations (prior to adding new ones)
        --thumbnail_url=<thumbnail_url>: (str) thumbnail url
        --channel_id=<channel_id>      : (str) claim id of the publisher channel
        --channel_name=<channel_name>  : (str) name of the publisher channel
        --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                                   for channel certificates, defaults to all accounts.
        --account_id=<account_id>      : (str) account in which to look for collection (default: all)
        --wallet_id=<wallet_id>        : (str) restrict operation to specific wallet
        --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
        --claim_address=<claim_address>: (str) address where the collection is sent
        --preview                      : (bool) do not broadcast the transaction
        --blocking                     : (bool) wait until transaction is in mempool
        --replace                      : (bool) instead of modifying specific values on
                                                the collection, this will clear all existing values
                                                and only save passed in values, useful for form
                                                submissions where all values are always set

    Returns: {Transaction}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    funding_accounts = wallet.get_accounts_or_all(funding_account_ids)
    if account_id:
        account = wallet.get_account_or_error(account_id)
        accounts = [account]
    else:
        account = wallet.default_account
        accounts = wallet.accounts

    # The claim to update must resolve to exactly one collection we control.
    existing_collections = await self.ledger.get_collections(
        wallet=wallet, accounts=accounts, claim_id=claim_id
    )
    if len(existing_collections) != 1:
        account_ids = ', '.join(f"'{account.id}'" for account in accounts)
        # TODO: use error from lbry.error
        raise Exception(
            f"Can't find the collection '{claim_id}' in account(s) {account_ids}."
        )
    old_txo = existing_collections[0]
    if not old_txo.claim.is_collection:
        # TODO: use error from lbry.error
        raise Exception(
            f"A claim with id '{claim_id}' was found but it is not a collection."
        )

    # Fall back to the previous bid amount when no new bid is given.
    if bid is not None:
        amount = self.get_dewies_or_error('bid', bid, positive_value=True)
    else:
        amount = old_txo.amount

    if claim_address is not None:
        self.valid_address_or_error(claim_address)
    else:
        claim_address = old_txo.get_address(account.ledger)

    # Keep the existing channel signature unless the caller changes the
    # channel, clears it, or replaces the claim wholesale.
    channel = None
    if channel_id or channel_name:
        channel = await self.get_channel_or_error(
            wallet, channel_account_id, channel_id, channel_name, for_signing=True)
    elif old_txo.claim.is_signed and not clear_channel and not replace:
        channel = old_txo.channel

    if replace:
        # Start from a blank claim; only the passed-in values survive.
        claim = Claim()
        claim.collection.update(**kwargs)
    else:
        # Copy the old claim and apply the requested changes on top.
        claim = Claim.from_bytes(old_txo.claim.to_bytes())
        claim.collection.update(**kwargs)
    tx = await Transaction.claim_update(
        old_txo, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel
    )
    new_txo = tx.outputs[0]

    if channel:
        new_txo.sign(channel)
    await tx.sign(funding_accounts)

    if not preview:
        await self.broadcast_or_release(tx, blocking)
        self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
    else:
        # Preview only: release the reserved outputs so they can be spent later.
        await account.ledger.release_tx(tx)

    return tx
@requires(WALLET_COMPONENT)
async def jsonrpc_collection_abandon(self, *args, **kwargs):
    """
    Abandon one of my collection claims.

    Usage:
        collection_abandon [<claim_id> | --claim_id=<claim_id>]
                           [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                           [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                           [--preview] [--blocking]

    Options:
        --claim_id=<claim_id>     : (str) claim_id of the claim to abandon
        --txid=<txid>             : (str) txid of the claim to abandon
        --nout=<nout>             : (int) nout of the claim to abandon
        --account_id=<account_id> : (str) id of the account to use
        --wallet_id=<wallet_id>   : (str) restrict operation to specific wallet
        --preview                 : (bool) do not broadcast the transaction
        --blocking                : (bool) wait until abandon is in mempool

    Returns: {Transaction}
    """
    # Abandoning a collection is mechanically identical to abandoning a
    # stream claim, so delegate wholesale.
    return await self.jsonrpc_stream_abandon(*args, **kwargs)
@requires(WALLET_COMPONENT)
def jsonrpc_collection_list(
        self, resolve_claims=0, resolve=False, account_id=None,
        wallet_id=None, page=None, page_size=None):
    """
    List my collection claims.

    Usage:
        collection_list [--resolve_claims=<resolve_claims>] [--resolve] [<account_id> | --account_id=<account_id>]
                        [--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>]

    Options:
        --resolve                         : (bool) resolve collection claim
        --resolve_claims=<resolve_claims> : (int) resolve every claim
        --account_id=<account_id>         : (str) id of the account to use
        --wallet_id=<wallet_id>           : (str) restrict results to specific wallet
        --page=<page>                     : (int) page to return during paginating
        --page_size=<page_size>           : (int) number of items on page during pagination

    Returns: {Paginated[Output]}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    if account_id:
        # Single-account query: the account object already exposes bound
        # fetch/count callables.
        account = wallet.get_account_or_error(account_id)
        collections = account.get_collections
        collection_count = account.get_collection_count
    else:
        # Whole-wallet query: bind the ledger-level queries to this wallet.
        collections = partial(self.ledger.get_collections, wallet=wallet, accounts=wallet.accounts)
        collection_count = partial(self.ledger.get_collection_count, wallet=wallet, accounts=wallet.accounts)
    return paginate_rows(
        collections, collection_count, page, page_size,
        resolve=resolve, resolve_claims=resolve_claims
    )
2019-11-14 03:16:27 +01:00
@requires(WALLET_COMPONENT)
async def jsonrpc_collection_resolve(
        self, claim_id=None, url=None, wallet_id=None, page=1, page_size=DEFAULT_PAGE_SIZE):
    """
    Resolve claims in the collection.

    Usage:
        collection_resolve (--claim_id=<claim_id> | --url=<url>)
                           [--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>]

    Options:
        --claim_id=<claim_id>      : (str) claim id of the collection
        --url=<url>                : (str) url of the collection
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination

    Returns: {Paginated[Output]}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    # Locate the collection claim either by claim_id or by resolving a url.
    if claim_id:
        txo = await self.ledger.get_claim_by_claim_id(claim_id, wallet.accounts)
        if not isinstance(txo, Output) or not txo.is_claim:
            # TODO: use error from lbry.error
            raise Exception(f"Could not find collection with claim_id '{claim_id}'.")
    elif url:
        txo = (await self.ledger.resolve(wallet.accounts, [url]))[url]
        if not isinstance(txo, Output) or not txo.is_claim:
            # TODO: use error from lbry.error
            raise Exception(f"Could not find collection with url '{url}'.")
    else:
        # TODO: use error from lbry.error
        raise Exception("Missing argument claim_id or url.")

    # Normalize paging inputs; page size is capped at 50 items.
    page_num, page_size = abs(page), min(abs(page_size), 50)
    items = await self.ledger.resolve_collection(txo, page_size * (page_num - 1), page_size)
    total_items = len(txo.claim.collection.claims.ids)
    return {
        "items": items,
        # Ceiling division in exact integer arithmetic.
        "total_pages": (total_items + page_size - 1) // page_size,
        "total_items": total_items,
        "page_size": page_size,
        "page": page
    }
2019-11-13 23:50:35 +01:00
2019-03-24 21:55:04 +01:00
# Section header used by the API documentation generator to group the
# support_* commands.
SUPPORT_DOC = """
    Create, list and abandon all types of supports.
    """
@requires(WALLET_COMPONENT)
async def jsonrpc_support_create(
        self, claim_id, amount, tip=False,
        channel_id=None, channel_name=None, channel_account_id=None,
        account_id=None, wallet_id=None, funding_account_ids=None,
        comment=None, preview=False, blocking=False):
    """
    Create a support or a tip for name claim.

    Usage:
        support_create (<claim_id> | --claim_id=<claim_id>) (<amount> | --amount=<amount>)
                       [--tip] [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                       [--channel_id=<channel_id> | --channel_name=<channel_name>]
                       [--channel_account_id=<channel_account_id>...] [--comment=<comment>]
                       [--preview] [--blocking] [--funding_account_ids=<funding_account_ids>...]

    Options:
        --claim_id=<claim_id>         : (str) claim_id of the claim to support
        --amount=<amount>             : (decimal) amount of support
        --tip                         : (bool) send support to claim owner, default: false.
        --channel_id=<channel_id>     : (str) claim id of the supporters identity channel
        --channel_name=<channel_name> : (str) name of the supporters identity channel
        --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                                   for channel certificates, defaults to all accounts.
        --account_id=<account_id>     : (str) account to use for holding the transaction
        --wallet_id=<wallet_id>       : (str) restrict operation to specific wallet
        --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
        --comment=<comment>           : (str) add a comment to the support
        --preview                     : (bool) do not broadcast the transaction
        --blocking                    : (bool) wait until transaction is in mempool

    Returns: {Transaction}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    # NOTE(review): `assert` is stripped under `python -O`; this validation
    # should eventually raise a proper error from lbry.error instead.
    assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
    funding_accounts = wallet.get_accounts_or_all(funding_account_ids)
    channel = await self.get_channel_or_none(wallet, channel_account_id, channel_id, channel_name, for_signing=True)
    amount = self.get_dewies_or_error("amount", amount)
    claim = await self.ledger.get_claim_by_claim_id(claim_id)
    claim_address = claim.get_address(self.ledger)
    if not tip:
        # Plain support: pay the support back to one of our own addresses.
        # A tip keeps the claim owner's address resolved above.
        account = wallet.get_account_or_default(account_id)
        claim_address = await account.receiving.get_or_create_usable_address()

    tx = await Transaction.support(
        claim.claim_name, claim_id, amount, claim_address, funding_accounts, funding_accounts[0], channel,
        comment=comment
    )
    new_txo = tx.outputs[0]
    if channel:
        new_txo.sign(channel)
    await tx.sign(funding_accounts)

    if not preview:
        await self.broadcast_or_release(tx, blocking)
        # Record the support locally so it shows up immediately.
        await self.storage.save_supports({claim_id: [{
            'txid': tx.id,
            'nout': tx.position,
            'address': claim_address,
            'claim_id': claim_id,
            'amount': dewies_to_lbc(amount)
        }]})
        self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('new_support'))
    else:
        # Preview only: release the reserved outputs so they can be spent later.
        await self.ledger.release_tx(tx)

    return tx
2016-08-08 08:32:56 +02:00
@requires(WALLET_COMPONENT)
def jsonrpc_support_list(self, *args, received=False, sent=False, staked=False, **kwargs):
    """
    List staked supports and sent/received tips.

    Usage:
        support_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
                     [--name=<name>...] [--claim_id=<claim_id>...]
                     [--received | --sent | --staked] [--is_spent]
                     [--page=<page>] [--page_size=<page_size>] [--no_totals]

    Options:
        --name=<name>              : (str or list) claim name
        --claim_id=<claim_id>      : (str or list) claim id
        --received                 : (bool) only show received (tips)
        --sent                     : (bool) only show sent (tips)
        --staked                   : (bool) only show my staked supports
        --is_spent                 : (bool) show abandoned supports
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination
        --no_totals                : (bool) do not calculate the total number of pages and items in result set
                                            (significant performance boost)

    Returns: {Paginated[Output]}
    """
    # This endpoint is a filter preset over txo_list for support-type txos.
    kwargs['type'] = 'support'
    if 'is_spent' not in kwargs:
        kwargs['is_not_spent'] = True
    if received:
        # Tips received: someone else's input, paid to our output.
        kwargs['is_not_my_input'] = True
        kwargs['is_my_output'] = True
    elif sent:
        # Tips sent: our input, paid to someone else's output.
        kwargs['is_my_input'] = True
        kwargs['is_not_my_output'] = True
        # spent for not my outputs is undetermined
        kwargs.pop('is_spent', None)
        kwargs.pop('is_not_spent', None)
    elif staked:
        # Supports we created and still control.
        kwargs['is_my_input'] = True
        kwargs['is_my_output'] = True
    return self.jsonrpc_txo_list(*args, **kwargs)
@requires(WALLET_COMPONENT)
async def jsonrpc_support_abandon(
        self, claim_id=None, txid=None, nout=None, keep=None,
        account_id=None, wallet_id=None, preview=False, blocking=False):
    """
    Abandon supports, including tips, of a specific claim, optionally
    keeping some amount as supports.

    Usage:
        support_abandon [--claim_id=<claim_id>] [(--txid=<txid> --nout=<nout>)] [--keep=<keep>]
                        [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                        [--preview] [--blocking]

    Options:
        --claim_id=<claim_id>     : (str) claim_id of the support to abandon
        --txid=<txid>             : (str) txid of the claim to abandon
        --nout=<nout>             : (int) nout of the claim to abandon
        --keep=<keep>             : (decimal) amount of lbc to keep as support
        --account_id=<account_id> : (str) id of the account to use
        --wallet_id=<wallet_id>   : (str) restrict operation to specific wallet
        --preview                 : (bool) do not broadcast the transaction
        --blocking                : (bool) wait until abandon is in mempool

    Returns: {Transaction}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    # NOTE(review): `assert` is stripped under `python -O`; this validation
    # should eventually raise a proper error from lbry.error instead.
    assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
    if account_id:
        account = wallet.get_account_or_error(account_id)
        accounts = [account]
    else:
        account = wallet.default_account
        accounts = wallet.accounts

    # Select the supports to abandon: either one exact outpoint (txid:nout)
    # or every support attached to a claim_id.
    if txid is not None and nout is not None:
        supports = await self.ledger.get_supports(
            wallet=wallet, accounts=accounts, **{'txo.txid': txid, 'txo.position': nout}
        )
    elif claim_id is not None:
        supports = await self.ledger.get_supports(
            wallet=wallet, accounts=accounts, claim_id=claim_id
        )
    else:
        # TODO: use error from lbry.error
        raise Exception('Must specify claim_id, or txid and nout')

    if not supports:
        # TODO: use error from lbry.error
        raise Exception('No supports found for the specified claim_id or txid:nout')

    if keep is not None:
        keep = self.get_dewies_or_error('keep', keep)
    else:
        keep = 0

    # Spend all selected supports; optionally re-create a single support
    # output holding the amount the caller wants to keep staked.
    outputs = []
    if keep > 0:
        outputs = [
            Output.pay_support_pubkey_hash(
                keep, supports[0].claim_name, supports[0].claim_id, supports[0].pubkey_hash
            )
        ]

    tx = await Transaction.create(
        [Input.spend(txo) for txo in supports], outputs, accounts, account
    )

    if not preview:
        await self.broadcast_or_release(tx, blocking)
        self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('abandon'))
    else:
        # Preview only: release the reserved outputs so they can be spent later.
        await self.ledger.release_tx(tx)

    return tx
2019-01-23 19:00:58 +01:00
# Section header used by the API documentation generator to group the
# transaction_* commands.
TRANSACTION_DOC = """
    Transaction management.
    """
@requires(WALLET_COMPONENT)
def jsonrpc_transaction_list(self, account_id=None, wallet_id=None, page=None, page_size=None):
    """
    List transactions belonging to wallet

    Usage:
        transaction_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
                         [--page=<page>] [--page_size=<page_size>]

    Options:
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination

    Returns:
        (list) List of transactions

        {
            "claim_info": (list) claim info if in txn [{
                                                    "address": (str) address of claim,
                                                    "balance_delta": (float) bid amount,
                                                    "amount": (float) claim amount,
                                                    "claim_id": (str) claim id,
                                                    "claim_name": (str) claim name,
                                                    "nout": (int) nout
                                                    }],
            "abandon_info": (list) abandon info if in txn [{
                                                    "address": (str) address of abandoned claim,
                                                    "balance_delta": (float) returned amount,
                                                    "amount": (float) claim amount,
                                                    "claim_id": (str) claim id,
                                                    "claim_name": (str) claim name,
                                                    "nout": (int) nout
                                                    }],
            "confirmations": (int) number of confirmations for the txn,
            "date": (str) date and time of txn,
            "fee": (float) txn fee,
            "support_info": (list) support info if in txn [{
                                                    "address": (str) address of support,
                                                    "balance_delta": (float) support amount,
                                                    "amount": (float) support amount,
                                                    "claim_id": (str) claim id,
                                                    "claim_name": (str) claim name,
                                                    "is_tip": (bool),
                                                    "nout": (int) nout
                                                    }],
            "timestamp": (int) timestamp,
            "txid": (str) txn id,
            "update_info": (list) update info if in txn [{
                                                    "address": (str) address of claim,
                                                    "balance_delta": (float) credited/debited
                                                    "amount": (float) absolute amount,
                                                    "claim_id": (str) claim id,
                                                    "claim_name": (str) claim name,
                                                    "nout": (int) nout
                                                    }],
            "value": (float) value of txn
        }
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    if account_id:
        # Single-account query: the account exposes bound history callables.
        account = wallet.get_account_or_error(account_id)
        transactions = account.get_transaction_history
        transaction_count = account.get_transaction_history_count
    else:
        # Whole-wallet query: bind the ledger-level queries to this wallet.
        transactions = partial(
            self.ledger.get_transaction_history, wallet=wallet, accounts=wallet.accounts)
        transaction_count = partial(
            self.ledger.get_transaction_history_count, wallet=wallet, accounts=wallet.accounts)
    return paginate_rows(transactions, transaction_count, page, page_size, read_only=True)
@requires(WALLET_COMPONENT)
def jsonrpc_transaction_show(self, txid):
    """
    Get a decoded transaction from a txid

    Usage:
        transaction_show (<txid> | --txid=<txid>)

    Options:
        --txid=<txid>  : (str) txid of the transaction

    Returns: {Transaction}
    """
    # Thin delegation to the wallet manager's transaction lookup.
    return self.wallet_manager.get_transaction(txid)
2020-03-07 06:34:47 +01:00
# Section header used by the API documentation generator to group the
# txo_* commands.
TXO_DOC = """
    List and sum transaction outputs.
    """
2020-03-21 00:07:16 +01:00
@staticmethod
def _constrain_txo_from_kwargs(
        constraints, type=None, txid=None,  # pylint: disable=redefined-builtin
        claim_id=None, channel_id=None, not_channel_id=None,
        name=None, reposted_claim_id=None,
        is_spent=False, is_not_spent=False,
        has_source=None, has_no_source=None,
        is_my_input_or_output=None, exclude_internal_transfers=False,
        is_my_output=None, is_not_my_output=None,
        is_my_input=None, is_not_my_input=None):
    """
    Translate txo_list/txo_spend keyword filters into database query
    constraints. Mutates and returns `constraints`.
    """
    # Mutually exclusive boolean pairs collapse to a single tri-state key;
    # leaving both flags unset leaves the dimension unconstrained.
    if is_spent:
        constraints['is_spent'] = True
    elif is_not_spent:
        constraints['is_spent'] = False
    if has_source:
        constraints['has_source'] = True
    elif has_no_source:
        constraints['has_source'] = False
    constraints['exclude_internal_transfers'] = exclude_internal_transfers
    if is_my_input_or_output is True:
        # This flag supersedes the individual input/output ownership flags.
        constraints['is_my_input_or_output'] = True
    else:
        if is_my_input is True:
            constraints['is_my_input'] = True
        elif is_not_my_input is True:
            constraints['is_my_input'] = False
        if is_my_output is True:
            constraints['is_my_output'] = True
        elif is_not_my_output is True:
            constraints['is_my_output'] = False
    # Scalar-or-list filters; `type` names are mapped to numeric txo types.
    database.constrain_single_or_list(constraints, 'txo_type', type, lambda x: TXO_TYPES[x])
    database.constrain_single_or_list(constraints, 'channel_id', channel_id)
    database.constrain_single_or_list(constraints, 'channel_id', not_channel_id, negate=True)
    database.constrain_single_or_list(constraints, 'claim_id', claim_id)
    database.constrain_single_or_list(constraints, 'claim_name', name)
    database.constrain_single_or_list(constraints, 'txid', txid)
    database.constrain_single_or_list(constraints, 'reposted_claim_id', reposted_claim_id)
    return constraints
@requires(WALLET_COMPONENT)
def jsonrpc_txo_list(
        self, account_id=None, wallet_id=None, page=None, page_size=None,
        resolve=False, order_by=None, no_totals=False, include_received_tips=False, **kwargs):
    """
    List my transaction outputs.

    Usage:
        txo_list [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...] [--claim_id=<claim_id>...]
                 [--channel_id=<channel_id>...] [--not_channel_id=<not_channel_id>...]
                 [--name=<name>...] [--is_spent | --is_not_spent]
                 [--is_my_input_or_output |
                     [[--is_my_output | --is_not_my_output] [--is_my_input | --is_not_my_input]]
                 ]
                 [--exclude_internal_transfers] [--include_received_tips]
                 [--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>]
                 [--resolve] [--order_by=<order_by>][--no_totals]

    Options:
        --type=<type>              : (str or list) claim type: stream, channel, support,
                                     purchase, collection, repost, other
        --txid=<txid>              : (str or list) transaction id of outputs
        --claim_id=<claim_id>      : (str or list) claim id
        --channel_id=<channel_id>  : (str or list) claims in this channel
        --not_channel_id=<not_channel_id>: (str or list) claims not in this channel
        --name=<name>              : (str or list) claim name
        --is_spent                 : (bool) only show spent txos
        --is_not_spent             : (bool) only show not spent txos
        --is_my_input_or_output    : (bool) txos which have your inputs or your outputs,
                                            if using this flag the other related flags
                                            are ignored (--is_my_output, --is_my_input, etc)
        --is_my_output             : (bool) show outputs controlled by you
        --is_not_my_output         : (bool) show outputs not controlled by you
        --is_my_input              : (bool) show outputs created by you
        --is_not_my_input          : (bool) show outputs not created by you
        --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination:
                                            "--is_my_input --is_my_output --type=other"
                                            this allows to exclude "change" payments, this
                                            flag can be used in combination with any of the other flags
        --include_received_tips    : (bool) calculate the amount of tips received for claim outputs
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination
        --resolve                  : (bool) resolves each claim to provide additional metadata
        --order_by=<order_by>      : (str) field to order by: 'name', 'height', 'amount' and 'none'
        --no_totals                : (bool) do not calculate the total number of pages and items in result set
                                            (significant performance boost)

    Returns: {Paginated[Output]}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    if account_id:
        # Single-account query: the account exposes bound txo callables.
        account = wallet.get_account_or_error(account_id)
        claims = account.get_txos
        claim_count = account.get_txo_count
    else:
        # Whole-wallet query: bind the ledger-level queries to this wallet.
        claims = partial(self.ledger.get_txos, wallet=wallet, accounts=wallet.accounts, read_only=True)
        claim_count = partial(self.ledger.get_txo_count, wallet=wallet, accounts=wallet.accounts, read_only=True)
    constraints = {
        'resolve': resolve,
        'include_is_spent': True,
        'include_is_my_input': True,
        'include_is_my_output': True,
        'include_received_tips': include_received_tips
    }
    if order_by is not None:
        if order_by == 'name':
            # 'name' orders by the fully-qualified column.
            constraints['order_by'] = 'txo.claim_name'
        elif order_by in ('height', 'amount', 'none'):
            constraints['order_by'] = order_by
        else:
            # TODO: use error from lbry.error
            raise ValueError(f"'{order_by}' is not a valid --order_by value.")
    # Remaining kwargs are the filter flags shared with txo_spend/txo_sum.
    self._constrain_txo_from_kwargs(constraints, **kwargs)
    # Skipping the count query (--no_totals) avoids an expensive aggregate.
    return paginate_rows(claims, None if no_totals else claim_count, page, page_size, **constraints)
2020-03-07 06:34:47 +01:00
@requires(WALLET_COMPONENT)
async def jsonrpc_txo_spend(
        self, account_id=None, wallet_id=None, batch_size=100,
        include_full_tx=False, preview=False, blocking=False, **kwargs):
    """
    Spend transaction outputs, batching into multiple transactions as necessary.

    Usage:
        txo_spend [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...] [--claim_id=<claim_id>...]
                  [--channel_id=<channel_id>...] [--not_channel_id=<not_channel_id>...]
                  [--name=<name>...] [--is_my_input | --is_not_my_input]
                  [--exclude_internal_transfers] [--wallet_id=<wallet_id>]
                  [--preview] [--blocking] [--batch_size=<batch_size>] [--include_full_tx]

    Options:
        --type=<type>              : (str or list) claim type: stream, channel, support,
                                     purchase, collection, repost, other
        --txid=<txid>              : (str or list) transaction id of outputs
        --claim_id=<claim_id>      : (str or list) claim id
        --channel_id=<channel_id>  : (str or list) claims in this channel
        --not_channel_id=<not_channel_id>: (str or list) claims not in this channel
        --name=<name>              : (str or list) claim name
        --is_my_input              : (bool) show outputs created by you
        --is_not_my_input          : (bool) show outputs not created by you
        --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination:
                                     "--is_my_input --is_my_output --type=other"
                                     this allows to exclude "change" payments, this
                                     flag can be used in combination with any of the other flags
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
        --preview                  : (bool) do not broadcast the transaction
        --blocking                 : (bool) wait until abandon is in mempool
        --batch_size=<batch_size>  : (int) number of txos to spend per transactions
        --include_full_tx          : (bool) include entire tx in output and not just the txid

    Returns: {List[Transaction]}
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    if account_id:
        accounts = [wallet.get_account_or_error(account_id)]
    else:
        accounts = wallet.accounts
    # Only my own, unspent outputs are eligible for spending.
    constraints = self._constrain_txo_from_kwargs(
        {}, is_not_spent=True, is_my_output=True, **kwargs
    )
    txos = await self.ledger.get_txos(
        wallet=wallet, accounts=accounts, read_only=True,
        no_tx=True, no_channel_info=True, **constraints
    )
    txs = []
    # Drain the txo list in batches so each transaction stays a manageable size.
    while txos:
        batch = [txos.pop() for _ in range(min(len(txos), batch_size))]
        txs.append(
            await Transaction.create(
                [Input.spend(txo) for txo in batch], [], accounts, accounts[0]
            )
        )
    if not preview:
        for tx in txs:
            await self.broadcast_or_release(tx, blocking)
    return txs if include_full_tx else [{'txid': tx.id} for tx in txs]
2020-03-21 00:07:16 +01:00
@requires(WALLET_COMPONENT)
def jsonrpc_txo_sum(self, account_id=None, wallet_id=None, **kwargs):
    """
    Sum of transaction outputs.

    Usage:
        txo_sum [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...]
                [--channel_id=<channel_id>...] [--not_channel_id=<not_channel_id>...]
                [--claim_id=<claim_id>...] [--name=<name>...]
                [--is_spent] [--is_not_spent]
                [--is_my_input_or_output |
                    [[--is_my_output | --is_not_my_output] [--is_my_input | --is_not_my_input]]
                ]
                [--exclude_internal_transfers] [--wallet_id=<wallet_id>]

    Options:
        --type=<type>              : (str or list) claim type: stream, channel, support,
                                     purchase, collection, repost, other
        --txid=<txid>              : (str or list) transaction id of outputs
        --claim_id=<claim_id>      : (str or list) claim id
        --name=<name>              : (str or list) claim name
        --channel_id=<channel_id>  : (str or list) claims in this channel
        --not_channel_id=<not_channel_id>: (str or list) claims not in this channel
        --is_spent                 : (bool) only show spent txos
        --is_not_spent             : (bool) only show not spent txos
        --is_my_input_or_output    : (bool) txos which have your inputs or your outputs,
                                     if using this flag the other related flags
                                     are ignored (--is_my_output, --is_my_input, etc)
        --is_my_output             : (bool) show outputs controlled by you
        --is_not_my_output         : (bool) show outputs not controlled by you
        --is_my_input              : (bool) show outputs created by you
        --is_not_my_input          : (bool) show outputs not created by you
        --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination:
                                     "--is_my_input --is_my_output --type=other"
                                     this allows to exclude "change" payments, this
                                     flag can be used in combination with any of the other flags
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet

    Returns: int
    """
    # Fix: the Usage line previously advertised "txo_list" even though this
    # method implements the "txo_sum" command; the docstring is parsed to
    # generate CLI help, so the wrong name was user-visible.
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    accounts = [wallet.get_account_or_error(account_id)] if account_id else wallet.accounts
    return self.ledger.get_txo_sum(
        wallet=wallet, accounts=accounts,
        read_only=True, **self._constrain_txo_from_kwargs({}, **kwargs)
    )
@requires(WALLET_COMPONENT)
async def jsonrpc_txo_plot(
        self, account_id=None, wallet_id=None,
        days_back=0, start_day=None, days_after=None, end_day=None, **kwargs):
    """
    Plot transaction output sum over days.

    Usage:
        txo_plot [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...]
                 [--claim_id=<claim_id>...] [--name=<name>...] [--is_spent] [--is_not_spent]
                 [--channel_id=<channel_id>...] [--not_channel_id=<not_channel_id>...]
                 [--is_my_input_or_output |
                     [[--is_my_output | --is_not_my_output] [--is_my_input | --is_not_my_input]]
                 ]
                 [--exclude_internal_transfers] [--wallet_id=<wallet_id>]
                 [--days_back=<days_back> |
                     [--start_day=<start_day> [--days_after=<days_after> | --end_day=<end_day>]]
                 ]

    Options:
        --type=<type>              : (str or list) claim type: stream, channel, support,
                                     purchase, collection, repost, other
        --txid=<txid>              : (str or list) transaction id of outputs
        --claim_id=<claim_id>      : (str or list) claim id
        --name=<name>              : (str or list) claim name
        --channel_id=<channel_id>  : (str or list) claims in this channel
        --not_channel_id=<not_channel_id>: (str or list) claims not in this channel
        --is_spent                 : (bool) only show spent txos
        --is_not_spent             : (bool) only show not spent txos
        --is_my_input_or_output    : (bool) txos which have your inputs or your outputs,
                                     if using this flag the other related flags
                                     are ignored (--is_my_output, --is_my_input, etc)
        --is_my_output             : (bool) show outputs controlled by you
        --is_not_my_output         : (bool) show outputs not controlled by you
        --is_my_input              : (bool) show outputs created by you
        --is_not_my_input          : (bool) show outputs not created by you
        --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination:
                                     "--is_my_input --is_my_output --type=other"
                                     this allows to exclude "change" payments, this
                                     flag can be used in combination with any of the other flags
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
        --days_back=<days_back>    : (int) number of days back from today
                                     (not compatible with --start_day, --days_after, --end_day)
        --start_day=<start_day>    : (date) start on specific date (YYYY-MM-DD)
                                     (instead of --days_back)
        --days_after=<days_after>  : (int) end number of days after --start_day
                                     (instead of --end_day)
        --end_day=<end_day>        : (date) end on specific date (YYYY-MM-DD)
                                     (instead of --days_after)

    Returns: List[Dict]
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    if account_id:
        accounts = [wallet.get_account_or_error(account_id)]
    else:
        accounts = wallet.accounts
    plot = await self.ledger.get_txo_plot(
        wallet=wallet, accounts=accounts,
        read_only=True, days_back=days_back, start_day=start_day, days_after=days_after, end_day=end_day,
        **self._constrain_txo_from_kwargs({}, **kwargs)
    )
    # Totals come back in dewies; convert in place to LBC strings for display.
    for row in plot:
        row['total'] = dewies_to_lbc(row['total'])
    return plot
2019-01-21 21:55:50 +01:00
# Section heading consumed by the API documentation generator for the
# group of "utxo_*" commands that follow.
UTXO_DOC = """
Unspent transaction management.
"""
@requires(WALLET_COMPONENT)
def jsonrpc_utxo_list(self, *args, **kwargs):
    """
    List unspent transaction outputs

    Usage:
        utxo_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
                  [--page=<page>] [--page_size=<page_size>]

    Options:
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination

    Returns: {Paginated[Output]}
    """
    # A UTXO listing is just a txo_list constrained to unspent,
    # non-claim ("other"/"purchase") outputs.
    kwargs.update(type=['other', 'purchase'], is_not_spent=True)
    return self.jsonrpc_txo_list(*args, **kwargs)
2017-11-01 22:17:38 +01:00
2019-01-04 08:49:29 +01:00
@requires(WALLET_COMPONENT)
async def jsonrpc_utxo_release(self, account_id=None, wallet_id=None):
    """
    When spending a UTXO it is locally locked to prevent double spends;
    occasionally this can result in a UTXO being locked which ultimately
    did not get spent (failed to broadcast, spend transaction was not
    accepted by blockchain node, etc). This command releases the lock
    on all UTXOs in your account.

    Usage:
        utxo_release [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]

    Options:
        --account_id=<account_id> : (str) id of the account to query
        --wallet_id=<wallet_id>   : (str) restrict operation to specific wallet

    Returns:
        None
    """
    wallet = self.wallet_manager.get_wallet_or_default(wallet_id)
    if account_id is not None:
        accounts = [wallet.get_account_or_error(account_id)]
    else:
        accounts = wallet.accounts
    for account in accounts:
        await account.release_all_outputs()
2019-01-04 08:49:29 +01:00
2019-01-21 21:55:50 +01:00
# Section heading consumed by the API documentation generator for the
# group of "blob_*" commands that follow.
BLOB_DOC = """
Blob management.
"""
@requires(WALLET_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT)
async def jsonrpc_blob_get(self, blob_hash, timeout=None, read=False):
    """
    Download and return a blob

    Usage:
        blob_get (<blob_hash> | --blob_hash=<blob_hash>) [--timeout=<timeout>] [--read]

    Options:
        --blob_hash=<blob_hash>                        : (str) blob hash of the blob to get
        --timeout=<timeout>                            : (int) timeout in number of seconds

    Returns:
        (str) Success/Fail message or (dict) decoded data
    """
    loop = asyncio.get_event_loop()
    blob = await download_blob(loop, self.conf, self.blob_manager, self.dht_node, blob_hash)
    if read:
        with blob.reader_context() as handle:
            return handle.read().decode()
    if isinstance(blob, BlobBuffer):
        # Buffers are normally reclaimed by their stream; a manual download
        # has no stream, so delete explicitly.
        log.warning("manually downloaded blob buffer could have missed garbage collection, clearing it")
        blob.delete()
    return f"Downloaded blob {blob_hash}"
@requires(BLOB_COMPONENT, DATABASE_COMPONENT)
async def jsonrpc_blob_delete(self, blob_hash):
    """
    Delete a blob

    Usage:
        blob_delete (<blob_hash> | --blob_hash=<blob_hash>)

    Options:
        --blob_hash=<blob_hash>  : (str) blob hash of the blob to delete

    Returns:
        (str) Success/fail message
    """
    if not (blob_hash and is_valid_blobhash(blob_hash)):
        return f"Invalid blob hash to delete '{blob_hash}'"
    # If the hash is a stream descriptor, delete the whole stream;
    # otherwise just remove the single blob.
    matching_streams = self.file_manager.get_filtered(sd_hash=blob_hash)
    if matching_streams:
        await self.file_manager.delete(matching_streams[0])
    else:
        await self.blob_manager.delete_blobs([blob_hash])
    return f"Deleted {blob_hash}"
2019-01-23 19:00:58 +01:00
# Section heading consumed by the API documentation generator for the
# peer/DHT commands that follow.
PEER_DOC = """
DHT / Blob Exchange peer commands.
"""
2022-02-11 23:45:08 +01:00
async def jsonrpc_peer_list(self, blob_hash, page=None, page_size=None):
    """
    Get peers for blob hash

    Usage:
        peer_list (<blob_hash> | --blob_hash=<blob_hash>)
                  [--page=<page>] [--page_size=<page_size>]

    Options:
        --blob_hash=<blob_hash>  : (str) find available peers for this blob hash
        --page=<page>            : (int) page to return during paginating
        --page_size=<page_size>  : (int) number of items on page during pagination

    Returns:
        (list) List of contact dictionaries {'address': <peer ip>, 'udp_port': <dht port>, 'tcp_port': <peer port>,
         'node_id': <peer node id>}
    """
    if not is_valid_blobhash(blob_hash):
        # TODO: use error from lbry.error
        raise Exception("invalid blob hash")
    # Fix: `asyncio.Queue(loop=...)` — the `loop` argument was deprecated in
    # Python 3.8 and removed in 3.10; the queue binds to the running loop.
    peer_q = asyncio.Queue()
    if self.component_manager.has_component(TRACKER_ANNOUNCER_COMPONENT):
        tracker = self.component_manager.get_component(TRACKER_ANNOUNCER_COMPONENT)
        tracker_peers = await tracker.get_kademlia_peer_list(bytes.fromhex(blob_hash))
        log.info("Found %d peers for %s from trackers.", len(tracker_peers), blob_hash[:8])
        peer_q.put_nowait(tracker_peers)
    elif not self.component_manager.has_component(DHT_COMPONENT):
        raise Exception("Peer list needs, at least, either a DHT component or a Tracker component for discovery.")
    peers = []
    if self.component_manager.has_component(DHT_COMPONENT):
        await self.dht_node._peers_for_value_producer(blob_hash, peer_q)
    while not peer_q.empty():
        peers.extend(peer_q.get_nowait())
    # De-duplicate by (address, tcp_port); later entries overwrite earlier ones.
    results = {
        (peer.address, peer.tcp_port): {
            "node_id": hexlify(peer.node_id).decode() if peer.node_id else None,
            "address": peer.address,
            "udp_port": peer.udp_port,
            "tcp_port": peer.tcp_port,
        }
        for peer in peers
    }
    return paginate_list(list(results.values()), page, page_size)
2016-08-03 09:16:06 +02:00
@requires(DATABASE_COMPONENT)
async def jsonrpc_blob_announce(self, blob_hash=None, stream_hash=None, sd_hash=None):
    """
    Announce blobs to the DHT

    Usage:
        blob_announce (<blob_hash> | --blob_hash=<blob_hash>
                      | --stream_hash=<stream_hash> | --sd_hash=<sd_hash>)

    Options:
        --blob_hash=<blob_hash>        : (str) announce a blob, specified by blob_hash
        --stream_hash=<stream_hash>    : (str) announce all blobs associated with
                                         stream_hash
        --sd_hash=<sd_hash>            : (str) announce all blobs associated with
                                         sd_hash and the sd_hash itself

    Returns:
        (bool) true if successful
    """
    if blob_hash:
        blob_hashes = [blob_hash]
    elif stream_hash or sd_hash:
        if sd_hash and stream_hash:
            # TODO: use error from lbry.error
            raise Exception("either the sd hash or the stream hash should be provided, not both")
        if sd_hash:
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
        # Only completed blobs can be announced.
        blobs = await self.storage.get_blobs_for_stream(stream_hash, only_completed=True)
        blob_hashes = [blob.blob_hash for blob in blobs if blob.blob_hash is not None]
    else:
        # TODO: use error from lbry.error
        raise Exception('single argument must be specified')
    await self.storage.should_single_announce_blobs(blob_hashes, immediate=True)
    return True
@requires(BLOB_COMPONENT, WALLET_COMPONENT)
async def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None,
                            finished=None, page=None, page_size=None):
    """
    Returns blob hashes. If not given filters, returns all blobs known by the blob manager

    Usage:
        blob_list [--needed] [--finished] [<uri> | --uri=<uri>]
                  [<stream_hash> | --stream_hash=<stream_hash>]
                  [<sd_hash> | --sd_hash=<sd_hash>]
                  [--page=<page>] [--page_size=<page_size>]

    Options:
        --needed                     : (bool) only return needed blobs
        --finished                   : (bool) only return finished blobs
        --uri=<uri>                  : (str) filter blobs by stream in a uri
        --stream_hash=<stream_hash>  : (str) filter blobs by stream hash
        --sd_hash=<sd_hash>          : (str) filter blobs in a stream by sd hash, ie the hash of the stream
                                       descriptor blob for a stream that has been downloaded
        --page=<page>                : (int) page to return during paginating
        --page_size=<page_size>      : (int) number of items on page during pagination

    Returns:
        (list) List of blob hashes
    """
    if uri or stream_hash or sd_hash:
        # Normalize whichever identifier was given into (sd_hash, stream_hash).
        if uri:
            metadata = (await self.resolve([], uri))[uri]
            sd_hash = utils.get_sd_hash(metadata)
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
        elif stream_hash:
            sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
        elif sd_hash:
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
            sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
        blobs = [sd_hash] if sd_hash else []
        if stream_hash:
            stream_blobs = await self.storage.get_blobs_for_stream(stream_hash)
            # the final db row is excluded — presumably the stream terminator
            # entry with no content blob; TODO confirm against storage schema
            blobs.extend(b.blob_hash for b in stream_blobs[:-1])
    else:
        blobs = list(self.blob_manager.completed_blob_hashes)
    if needed:
        blobs = [b for b in blobs if not self.blob_manager.is_blob_verified(b)]
    if finished:
        blobs = [b for b in blobs if self.blob_manager.is_blob_verified(b)]
    return paginate_list(blobs, page, page_size)
2016-08-27 01:58:53 +02:00
@requires(BLOB_COMPONENT)
async def jsonrpc_blob_reflect(self, blob_hashes, reflector_server=None):
    """
    Reflects specified blobs

    Usage:
        blob_reflect (<blob_hashes>...) [--reflector_server=<reflector_server>]

    Options:
        --reflector_server=<reflector_server>          : (str) reflector address

    Returns:
        (list) reflected blob hashes
    """
    # Not implemented: reflecting arbitrary blobs (outside a stream) is
    # currently unsupported; use file_reflect instead.
    raise NotImplementedError()
2018-03-22 21:54:29 +01:00
@requires(BLOB_COMPONENT)
async def jsonrpc_blob_reflect_all(self):
    """
    Reflects all saved blobs

    Usage:
        blob_reflect_all

    Options:
        None

    Returns:
        (bool) true if successful
    """
    # Not implemented: bulk reflecting of every stored blob is currently
    # unsupported; use file_reflect for stream-level reflecting.
    raise NotImplementedError()
2021-08-16 20:15:12 +02:00
@requires(DISK_SPACE_COMPONENT)
async def jsonrpc_blob_clean(self):
    """
    Deletes blobs to cleanup disk space

    Usage:
        blob_clean

    Options:
        None

    Returns:
        (bool) true if successful
    """
    # The blob-selection policy lives entirely in the disk space manager
    # component; this endpoint only triggers a cleanup pass.
    return await self.disk_space_manager.clean()
2021-08-16 20:15:12 +02:00
@requires(FILE_MANAGER_COMPONENT)
async def jsonrpc_file_reflect(self, **kwargs):
    """
    Reflect all the blobs in a file matching the filter criteria

    Usage:
        file_reflect [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                     [--stream_hash=<stream_hash>] [--rowid=<rowid>]
                     [--reflector=<reflector>]

    Options:
        --sd_hash=<sd_hash>          : (str) get file with matching sd hash
        --file_name=<file_name>      : (str) get file with matching file name in the
                                       downloads folder
        --stream_hash=<stream_hash>  : (str) get file with matching stream hash
        --rowid=<rowid>              : (int) get file with matching row id
        --reflector=<reflector>      : (str) reflector server, ip address or url
                                       by default choose a server from the config

    Returns:
        (list) list of blobs reflected
    """
    server = kwargs.get('server')
    port = kwargs.get('port')
    if server and port:
        port = int(port)
    else:
        # No explicit target: pick a random configured reflector server.
        server, port = random.choice(self.conf.reflector_servers)
    stream_manager = self.file_manager.source_managers['stream']
    reflected = await asyncio.gather(*(
        stream_manager.reflect_stream(stream, server, port)
        for stream in self.file_manager.get_filtered(**kwargs)
    ))
    # Flatten the per-stream lists of reflected blob hashes.
    total = []
    for stream_blobs in reflected:
        total.extend(stream_blobs)
    return total
2019-01-22 23:44:17 +01:00
@requires(DHT_COMPONENT)
async def jsonrpc_peer_ping(self, node_id, address, port):
    """
    Send a kademlia ping to the specified peer. If address and port are provided the peer is directly pinged,
    if not provided the peer is located first.

    Usage:
        peer_ping (<node_id> | --node_id=<node_id>) (<address> | --address=<address>) (<port> | --port=<port>)

    Options:
        None

    Returns:
        (str) pong, or {'error': <error message>} if an error is encountered
    """
    if node_id and address and port:
        peer = make_kademlia_peer(unhexlify(node_id), address, udp_port=int(port))
        try:
            return await self.dht_node.protocol.get_rpc_peer(peer).ping()
        except asyncio.TimeoutError:
            return {'error': 'timeout'}
    # Without all three of node_id/address/port no peer can be constructed.
    return {'error': 'peer not found'}
2018-03-29 16:46:29 +02:00
@requires(DHT_COMPONENT)
def jsonrpc_routing_table_get(self):
    """
    Get DHT routing information

    Usage:
        routing_table_get

    Options:
        None

    Returns:
        (dict) dictionary containing routing and peer information
        {
            "buckets": {
                <bucket index>: [
                    {
                        "address": (str) peer address,
                        "udp_port": (int) peer udp port,
                        "tcp_port": (int) peer tcp port,
                        "node_id": (str) peer node id,
                    }
                ]
            },
            "node_id": (str) the local dht node id
            "prefix_neighbors_count": (int) the amount of peers sharing the same byte prefix of the local node id
        }
    """
    local_node_id = self.dht_node.protocol.node_id
    result = {'buckets': {}, 'prefix_neighbors_count': 0}
    for index, bucket in enumerate(self.dht_node.protocol.routing_table.buckets):
        entries = []
        for peer in bucket.peers:
            entries.append({
                "address": peer.address,
                "udp_port": peer.udp_port,
                "tcp_port": peer.tcp_port,
                "node_id": hexlify(peer.node_id).decode(),
            })
            # A "prefix neighbor" shares the first byte of our node id.
            if peer.node_id[0] == local_node_id[0]:
                result['prefix_neighbors_count'] += 1
        result['buckets'][index] = entries
    result['node_id'] = hexlify(local_node_id).decode()
    return result
2017-10-10 21:04:48 +02:00
2020-03-11 00:50:25 +01:00
# Section heading consumed by the API documentation generator for the
# tracemalloc troubleshooting commands that follow.
TRACEMALLOC_DOC = """
Controls and queries tracemalloc memory tracing tools for troubleshooting.
"""
2020-03-16 10:39:42 +01:00
def jsonrpc_tracemalloc_enable(self):  # pylint: disable=no-self-use
    """
    Enable tracemalloc memory tracing

    Usage:
        jsonrpc_tracemalloc_enable

    Options:
        None

    Returns:
        (bool) is it tracing?
    """
    # start() is a no-op if tracing is already active; report current state.
    tracemalloc.start()
    return tracemalloc.is_tracing()
def jsonrpc_tracemalloc_disable(self):  # pylint: disable=no-self-use
    """
    Disable tracemalloc memory tracing

    Usage:
        jsonrpc_tracemalloc_disable

    Options:
        None

    Returns:
        (bool) is it tracing?
    """
    # stop() discards any collected snapshots; returns the (now False) state.
    tracemalloc.stop()
    return tracemalloc.is_tracing()
2020-03-11 21:59:56 +01:00
def jsonrpc_tracemalloc_top(self, items: int = 10):  # pylint: disable=no-self-use
    """
    Show most common objects, the place that created them and their size.

    Usage:
        jsonrpc_tracemalloc_top [(<items> | --items=<items>)]

    Options:
        --items=<items>               : (int) maximum items to return, from the most common

    Returns:
        (dict) dictionary containing most common objects in memory
        {
            "line": (str) filename and line number where it was created,
            "code": (str) code that created it,
            "size": (int) size in bytes, for each "memory block",
            "count" (int) number of memory blocks
        }
    """
    if not tracemalloc.is_tracing():
        # TODO: use error from lbry.error
        raise Exception("Enable tracemalloc first! See 'tracemalloc set' command.")
    stats = tracemalloc.take_snapshot().filter_traces((
        tracemalloc.Filter(False, "<frozen importlib._bootstrap>"),
        tracemalloc.Filter(False, "<unknown>"),
        # tracemalloc and linecache here use some memory, but thats not relevant
        tracemalloc.Filter(False, tracemalloc.__file__),
        tracemalloc.Filter(False, linecache.__file__),
    )).statistics('lineno', True)
    results = []
    for stat in stats:
        frame = stat.traceback[0]
        # keep only the last two path components for readability
        filename = os.sep.join(frame.filename.split(os.sep)[-2:])
        line = linecache.getline(frame.filename, frame.lineno).strip()
        results.append({
            # Fix: `filename` was computed but never used — "line" previously
            # reported a hard-coded "(unknown)" placeholder instead of the
            # shortened source location promised by the docstring.
            "line": f"{filename}:{frame.lineno}",
            "code": line,
            "size": stat.size,
            "count": stat.count
        })
        if len(results) == items:
            break
    return results
2019-08-12 08:52:37 +02:00
async def broadcast_or_release(self, tx, blocking=False):
    # Thin pass-through to the wallet manager's broadcast_or_release;
    # when `blocking` is true, callers wait for the tx to reach the
    # mempool (per the --blocking option docs on the txo/claim commands).
    await self.wallet_manager.broadcast_or_release(tx, blocking)
2019-06-10 07:26:35 +02:00
def valid_address_or_error(self, address, allow_script_address=False):
    """
    Raise if `address` is not a valid pubkey address (or, when
    `allow_script_address` is true, a valid script address).
    """
    try:
        # Replaced `assert` (stripped when Python runs with -O) and a bare
        # `except:` with an explicit check; any error raised while decoding
        # the address is treated the same as "invalid".
        valid = self.ledger.is_pubkey_address(address) or (
            allow_script_address and self.ledger.is_script_address(address)
        )
    except Exception:  # decoding failures mean the address is malformed
        valid = False
    if not valid:
        # TODO: use error from lbry.error
        raise Exception(f"'{address}' is not a valid address")
@staticmethod
def valid_stream_name_or_error(name: str):
    """Raise if `name` cannot be used as a plain (non-channel) stream name."""
    try:
        if not name:
            raise InputStringIsBlankError('Stream name')
        parsed = URL.parse(name)
        if parsed.has_channel:
            # TODO: use error from lbry.error
            raise Exception(
                "Stream names cannot start with '@' symbol. This is reserved for channels claims."
            )
        name_round_trips = parsed.has_stream and parsed.stream.name == name
        if not name_round_trips:
            # TODO: use error from lbry.error
            raise Exception('Stream name has invalid characters.')
    except (TypeError, ValueError):
        # URL.parse signals malformed input with TypeError/ValueError.
        # TODO: use error from lbry.error
        raise Exception("Invalid stream name.")
2019-11-12 18:17:35 +01:00
@staticmethod
def valid_collection_name_or_error(name: str):
    """Raise if `name` cannot be used as a collection name."""
    try:
        if not name:
            # TODO: use error from lbry.error
            raise Exception('Collection name cannot be blank.')
        parsed = URL.parse(name)
        if parsed.has_channel:
            # TODO: use error from lbry.error
            raise Exception(
                "Collection names cannot start with '@' symbol. This is reserved for channels claims."
            )
        name_round_trips = parsed.has_stream and parsed.stream.name == name
        if not name_round_trips:
            # TODO: use error from lbry.error
            raise Exception('Collection name has invalid characters.')
    except (TypeError, ValueError):
        # URL.parse signals malformed input with TypeError/ValueError.
        # TODO: use error from lbry.error
        raise Exception("Invalid collection name.")
@staticmethod
def valid_channel_name_or_error(name: str):
    """Raise if `name` cannot be used as a channel name (must start with '@')."""
    try:
        if not name:
            # TODO: use error from lbry.error
            raise Exception(
                "Channel name cannot be blank."
            )
        parsed = URL.parse(name)
        if not parsed.has_channel:
            # TODO: use error from lbry.error
            raise Exception("Channel names must start with '@' symbol.")
        if parsed.channel.name != name:
            # TODO: use error from lbry.error
            raise Exception("Channel name has invalid character")
    except (TypeError, ValueError):
        # URL.parse signals malformed input with TypeError/ValueError.
        # TODO: use error from lbry.error
        raise Exception("Invalid channel name.")
2019-03-24 21:55:04 +01:00
def get_fee_address(self, kwargs: dict, claim_address: str) -> str:
    """
    Pick the address that should collect fees: an explicitly provided
    (and validated) `fee_address`, else `claim_address` when any other
    fee field is present, else None.
    """
    if 'fee_address' in kwargs:
        fee_address = kwargs['fee_address']
        self.valid_address_or_error(fee_address)
        return fee_address
    if 'fee_currency' in kwargs or 'fee_amount' in kwargs:
        return claim_address
2019-03-24 21:55:04 +01:00
2020-01-03 04:18:49 +01:00
async def get_receiving_address(self, address: str, account: Optional[Account]) -> str:
    """Validate and return `address`, or generate a fresh receiving address
    from `account` when no address was supplied."""
    if address is not None or account is None:
        self.valid_address_or_error(address)
        return address
    return await account.receiving.get_or_create_usable_address()
async def get_channel_or_none(
        self, wallet: Wallet, account_ids: List[str], channel_id: str = None,
        channel_name: str = None, for_signing: bool = False) -> Output:
    # Returns None (implicit) when neither a channel id nor a name was given;
    # otherwise defers to get_channel_or_error, which raises if the channel
    # is missing, ambiguous, or (when for_signing) lacks a private key.
    if channel_id is not None or channel_name is not None:
        return await self.get_channel_or_error(
            wallet, account_ids, channel_id, channel_name, for_signing
        )
2019-03-24 21:55:04 +01:00
async def get_channel_or_error(
        self, wallet: Wallet, account_ids: List[str], channel_id: str = None,
        channel_name: str = None, for_signing: bool = False) -> Output:
    """
    Look up exactly one channel by id or name within the given accounts;
    raise when it is missing, ambiguous, or (for signing) has no private key.
    """
    if channel_id:
        key, value = 'id', channel_id
    elif channel_name:
        key, value = 'name', channel_name
    else:
        # TODO: use error from lbry.error
        raise ValueError("Couldn't find channel because a channel_id or channel_name was not provided.")
    channels = await self.ledger.get_channels(
        wallet=wallet, accounts=wallet.get_accounts_or_all(account_ids),
        **{f'claim_{key}': value}
    )
    if not channels:
        # TODO: use error from lbry.error
        raise ValueError(f"Couldn't find channel with channel_{key} '{value}'.")
    if len(channels) > 1:
        # TODO: use error from lbry.error
        raise ValueError(
            f"Multiple channels found with channel_{key} '{value}', "
            f"pass a channel_id to narrow it down."
        )
    channel = channels[0]
    if for_signing and not channel.has_private_key:
        # TODO: use error from lbry.error
        raise PrivateKeyNotFoundError(key, value)
    return channel
2019-03-24 21:55:04 +01:00
@staticmethod
def get_dewies_or_error(argument: str, lbc: str, positive_value=False):
    """
    Convert an LBC amount string to dewies.

    Raises ValueError naming `argument` when the value is malformed or,
    with `positive_value`, not strictly greater than zero.
    """
    try:
        dewies = lbc_to_dewies(lbc)
        if positive_value and dewies <= 0:
            # TODO: use error from lbry.error
            raise ValueError(f"'{argument}' value must be greater than 0.0")
        return dewies
    except ValueError as e:
        # TODO: use error from lbry.error
        # chain the original error so the root cause stays in the traceback
        raise ValueError(f"Invalid value for '{argument}': {e.args[0]}") from e
2019-03-25 00:45:54 +01:00
async def resolve(self, accounts, urls, **kwargs):
    """Resolve `urls` via the ledger, optionally caching claim outputs locally."""
    results = await self.ledger.resolve(accounts, urls, **kwargs)
    if results and self.conf.save_resolved_claims:
        claim_outputs = [result for result in results.values() if isinstance(result, Output)]
        try:
            await self.storage.save_claim_from_output(self.ledger, *claim_outputs)
        except DecodeError:
            # best-effort cache: undecodable claims are simply not persisted
            pass
    return results
2019-03-24 21:55:04 +01:00
@staticmethod
def _old_get_temp_claim_info(tx, txo, address, claim_dict, name, bid):
    """Build a legacy-shaped claim info dict for a just-created claim;
    height and claim_sequence are -1 because it is not yet confirmed."""
    info = {
        "claim_id": txo.claim_id,
        "name": name,
        "amount": bid,
        "address": address,
        "txid": tx.id,
        "nout": txo.position,
        "value": claim_dict,
        "height": -1,
        "claim_sequence": -1,
    }
    return info
2018-07-26 05:29:13 +02:00
def loggly_time_string(date):
    """URL-quote a datetime into the timestamp format used in loggly search URLs."""
    # NOTE(review): microsecond * 10**-5 looks like it was intended to be a
    # fractional-second suffix (microsecond * 10**-6 with a "."); preserved
    # as-is so existing query URLs keep their exact shape.
    stamp = date.strftime("%Y-%m-%dT%H:%M:%S")
    fraction = str(round(date.microsecond * (10.0 ** -5), 3))
    return quote(stamp + fraction + "Z")
2017-01-02 20:52:24 +01:00
def get_loggly_query_string(installation_id):
    """Build a pre-filled loggly search URL covering the last 24 hours of
    logs for the given installation id (matched by its short-id prefix)."""
    now = utils.now()
    yesterday = now - utils.timedelta(days=1)
    query = urlencode({
        'terms': f'json.installation_id:{installation_id[:SHORT_ID_LEN]}*',
        'from': loggly_time_string(yesterday),
        'to': loggly_time_string(now)
    })
    return "https://lbry.loggly.com/search#" + query