import binascii
import logging.handlers
import mimetypes
import os
import base58
import requests
import urllib
import json
import textwrap
import random
import signal
from copy import deepcopy
from twisted.web import server
from twisted.internet import defer, threads, error, reactor
from twisted.internet.task import LoopingCall
from twisted.python.failure import Failure

from lbryschema.claim import ClaimDict
from lbryschema.uri import parse_lbry_uri
from lbryschema.error import URIParseError, DecodeError
from lbryschema.validator import validate_claim_id
from lbryschema.address import decode_address

# TODO: importing this when internet is disabled raises a socket.gaierror
from lbrynet.core.system_info import get_lbrynet_version
from lbrynet import conf, analytics
from lbrynet.conf import LBRYCRD_WALLET, LBRYUM_WALLET, PTC_WALLET
from lbrynet.reflector import reupload
from lbrynet.reflector import ServerFactory as reflector_server_factory
from lbrynet.core.log_support import configure_loggly_handler
from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileSaverFactory
from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.lbry_file.EncryptedFileMetadataManager import DBEncryptedFileMetadataManager
from lbrynet.lbry_file.StreamDescriptor import EncryptedFileStreamType
from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager
from lbrynet.daemon.Downloader import GetStream
from lbrynet.daemon.Publisher import Publisher
from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager
from lbrynet.daemon.auth.server import AuthJSONRPCServer
from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager
from lbrynet.core import utils, system_info
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob
from lbrynet.core.Session import Session
from lbrynet.core.Wallet import LBRYumWallet, SqliteStorage, ClaimOutpoint
from lbrynet.core.looping_call_manager import LoopingCallManager
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.core.Error import InsufficientFundsError, UnknownNameError, NoSuchSDHash
from lbrynet.core.Error import NoSuchStreamHash, DownloadDataTimeout, DownloadSDTimeout
from lbrynet.core.Error import NullFundsError, NegativeFundsError

log = logging.getLogger(__name__)

INITIALIZING_CODE = 'initializing'
LOADING_DB_CODE = 'loading_db'
LOADING_WALLET_CODE = 'loading_wallet'
LOADING_FILE_MANAGER_CODE = 'loading_file_manager'
LOADING_SERVER_CODE = 'loading_server'
STARTED_CODE = 'started'
WAITING_FOR_FIRST_RUN_CREDITS = 'waiting_for_credits'
STARTUP_STAGES = [
    (INITIALIZING_CODE, 'Initializing'),
    (LOADING_DB_CODE, 'Loading databases'),
    (LOADING_WALLET_CODE, 'Catching up with the blockchain'),
    (LOADING_FILE_MANAGER_CODE, 'Setting up file manager'),
    (LOADING_SERVER_CODE, 'Starting lbrynet'),
    (STARTED_CODE, 'Started lbrynet'),
    (WAITING_FOR_FIRST_RUN_CREDITS, 'Waiting for first run credits'),
]

# TODO: make this consistent with the stages in Downloader.py
DOWNLOAD_METADATA_CODE = 'downloading_metadata'
DOWNLOAD_TIMEOUT_CODE = 'timeout'
DOWNLOAD_RUNNING_CODE = 'running'
DOWNLOAD_STOPPED_CODE = 'stopped'
STREAM_STAGES = [
    (INITIALIZING_CODE, 'Initializing'),
    (DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
    (DOWNLOAD_RUNNING_CODE, 'Started %s, got %s/%s blobs, stream status: %s'),
    (DOWNLOAD_STOPPED_CODE, 'Paused stream'),
    (DOWNLOAD_TIMEOUT_CODE, 'Stream timed out')
]

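# Illustration (not part of the daemon logic): the DOWNLOAD_RUNNING_CODE entry is a
# format template with four placeholders; _get_lbry_file_dict below fills it in, e.g.:
#
#     STREAM_STAGES[2][1] % ('lbry://what', 3, 10, 'running')
#     # -> 'Started lbry://what, got 3/10 blobs, stream status: running'
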
CONNECTION_STATUS_CONNECTED = 'connected'
CONNECTION_STATUS_NETWORK = 'network_connection'
CONNECTION_MESSAGES = {
    CONNECTION_STATUS_CONNECTED: 'No connection problems detected',
    CONNECTION_STATUS_NETWORK: "Your internet connection appears to have been interrupted",
}

SHORT_ID_LEN = 20


class IterableContainer(object):
    def __iter__(self):
        for attr in dir(self):
            if not attr.startswith("_"):
                yield getattr(self, attr)

    def __contains__(self, item):
        for attr in self:
            if item == attr:
                return True
        return False


class Checker(object):
    """The looping calls the daemon runs"""
    INTERNET_CONNECTION = 'internet_connection_checker'
    CONNECTION_STATUS = 'connection_status_checker'


class _FileID(IterableContainer):
    """The different ways a file can be identified"""
    NAME = 'name'
    SD_HASH = 'sd_hash'
    FILE_NAME = 'file_name'
    STREAM_HASH = 'stream_hash'
    CLAIM_ID = "claim_id"
    OUTPOINT = "outpoint"
    ROWID = "rowid"


FileID = _FileID()

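# Illustration: because _FileID subclasses IterableContainer, the FileID singleton
# supports iteration and membership tests over its public attribute values, e.g.:
#
#     'sd_hash' in FileID      # -> True, matches _FileID.SD_HASH
#     'bogus_field' in FileID  # -> False
#
# Daemon._get_lbry_file below relies on this to validate its search_by argument.
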
# TODO add login credentials in a conf file
# TODO alert if your copy of a lbry file is out of date with the name record


class NoValidSearch(Exception):
    pass


class CheckInternetConnection(object):
    def __init__(self, daemon):
        self.daemon = daemon

    def __call__(self):
        self.daemon.connected_to_internet = utils.check_connection()


class AlwaysSend(object):
    def __init__(self, value_generator, *args, **kwargs):
        self.value_generator = value_generator
        self.args = args
        self.kwargs = kwargs

    def __call__(self):
        d = defer.maybeDeferred(self.value_generator, *self.args, **self.kwargs)
        d.addCallback(lambda v: (True, v))
        return d

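# Illustration: AlwaysSend wraps a (possibly Deferred-returning) callable so the
# result always fires as a (True, value) pair, the shape the analytics manager's
# repeating metrics expect. A sketch of how Daemon._get_analytics wires it up:
#
#     metric = AlwaysSend(calculate_available_blob_size, blob_manager)
#     d = metric()  # Deferred firing with (True, <total verified blob bytes>)
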

# If an instance has a lot of blobs, this call might get very expensive.
# For reflector, with 50k blobs, it definitely has an impact on the first run
# But doesn't seem to impact performance after that.
@defer.inlineCallbacks
def calculate_available_blob_size(blob_manager):
    blob_hashes = yield blob_manager.get_all_verified_blobs()
    blobs = yield defer.DeferredList([blob_manager.get_blob(b) for b in blob_hashes])
    # DeferredList fires with (success, result) pairs; only count blobs that
    # resolved successfully and have a known length
    defer.returnValue(sum(b.length for success, b in blobs if success and b.length))


class Daemon(AuthJSONRPCServer):
    """
    LBRYnet daemon, a jsonrpc interface to lbry functions
    """

    allowed_during_startup = [
        'daemon_stop', 'status', 'version',
    ]

    def __init__(self, analytics_manager):
        AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http'])
        self.db_dir = conf.settings['data_dir']
        self.download_directory = conf.settings['download_directory']
        if conf.settings['BLOBFILES_DIR'] == "blobfiles":
            self.blobfile_dir = os.path.join(self.db_dir, "blobfiles")
        else:
            log.info("Using non-default blobfiles directory: %s", conf.settings['BLOBFILES_DIR'])
            self.blobfile_dir = conf.settings['BLOBFILES_DIR']
        self.data_rate = conf.settings['data_rate']
        self.max_key_fee = conf.settings['max_key_fee']
        self.disable_max_key_fee = conf.settings['disable_max_key_fee']
        self.download_timeout = conf.settings['download_timeout']
        # _get_or_download_sd_blob below reads self.search_timeout; this line
        # assumes a 'search_timeout' entry in the adjustable settings
        self.search_timeout = conf.settings['search_timeout']
        self.run_reflector_server = conf.settings['run_reflector_server']
        self.wallet_type = conf.settings['wallet']
        self.delete_blobs_on_remove = conf.settings['delete_blobs_on_remove']
        self.peer_port = conf.settings['peer_port']
        self.reflector_port = conf.settings['reflector_port']
        self.dht_node_port = conf.settings['dht_node_port']
        self.use_upnp = conf.settings['use_upnp']

        self.startup_status = STARTUP_STAGES[0]
        self.connected_to_internet = True
        self.connection_status_code = None
        self.platform = None
        self.current_db_revision = 4
        self.db_revision_file = conf.settings.get_db_revision_filename()
        self.session = None
        self.uploaded_temp_files = []
        self._session_id = conf.settings.get_session_id()
        # TODO: this should probably be passed into the daemon, or
        # possibly have the entire log upload functionality taken out
        # of the daemon, but I don't want to deal with that now

        self.analytics_manager = analytics_manager
        self.node_id = conf.settings.node_id

        self.wallet_user = None
        self.wallet_password = None
        self.query_handlers = {}
        self.waiting_on = {}
        self.streams = {}
        self.exchange_rate_manager = ExchangeRateManager()
        calls = {
            Checker.INTERNET_CONNECTION: LoopingCall(CheckInternetConnection(self)),
            Checker.CONNECTION_STATUS: LoopingCall(self._update_connection_status),
        }
        self.looping_call_manager = LoopingCallManager(calls)
        self.sd_identifier = StreamDescriptorIdentifier()
        self.stream_info_manager = None
        self.lbry_file_manager = None

    @defer.inlineCallbacks
    def setup(self):
        reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)

        configure_loggly_handler()

        @defer.inlineCallbacks
        def _announce_startup():
            def _announce():
                self.announced_startup = True
                self.startup_status = STARTUP_STAGES[5]
                log.info("Started lbrynet-daemon")
                log.info("%i blobs in manager", len(self.session.blob_manager.blobs))

            yield self.session.blob_manager.get_all_verified_blobs()
            yield _announce()

        log.info("Starting lbrynet-daemon")

        self.looping_call_manager.start(Checker.INTERNET_CONNECTION, 3600)
        self.looping_call_manager.start(Checker.CONNECTION_STATUS, 30)
        self.exchange_rate_manager.start()

        yield self._initial_setup()
        yield threads.deferToThread(self._setup_data_directory)
        yield self._check_db_migration()
        yield self._get_session()
        yield self._get_analytics()
        yield add_lbry_file_to_sd_identifier(self.sd_identifier)
        yield self._setup_stream_identifier()
        yield self._setup_lbry_file_manager()
        yield self._setup_query_handlers()
        yield self._setup_server()
        # get_balance() returns a Deferred, so yield it to log the value rather
        # than the Deferred's repr
        balance = yield self.session.wallet.get_balance()
        log.info("Starting balance: %s", balance)
        yield _announce_startup()

    def _get_platform(self):
        if self.platform is None:
            self.platform = system_info.get_platform()
        return self.platform

    def _initial_setup(self):
        def _log_platform():
            log.info("Platform: %s", json.dumps(self._get_platform()))
            return defer.succeed(None)

        d = _log_platform()
        return d

    def _check_network_connection(self):
        self.connected_to_internet = utils.check_connection()

    def _check_lbrynet_connection(self):
        def _log_success():
            log.info("lbrynet connectivity test passed")

        def _log_failure():
            log.info("lbrynet connectivity test failed")

        wonderfullife_sh = ("6f3af0fa3924be98a54766aa2715d22c6c1509c3f7fa32566df4899"
                            "a41f3530a9f97b2ecb817fa1dcbf1b30553aefaa7")
        d = download_sd_blob(self.session, wonderfullife_sh, self.session.base_payment_rate_manager)
        # call the loggers instead of returning them unexecuted
        d.addCallbacks(lambda _: _log_success(), lambda _: _log_failure())

    def _update_connection_status(self):
        self.connection_status_code = CONNECTION_STATUS_CONNECTED

        if not self.connected_to_internet:
            self.connection_status_code = CONNECTION_STATUS_NETWORK

    def _start_server(self):
        if self.peer_port is not None:
            server_factory = ServerProtocolFactory(self.session.rate_limiter,
                                                   self.query_handlers,
                                                   self.session.peer_manager)

            try:
                log.info("Daemon bound to port: %d", self.peer_port)
                self.lbry_server_port = reactor.listenTCP(self.peer_port, server_factory)
            except error.CannotListenError as e:
                import traceback
                log.error("Couldn't bind to port %d. Visit lbry.io/faq/how-to-change-port for"
                          " more details.", self.peer_port)
                log.error("%s", traceback.format_exc())
                # interpolate the error into the message instead of passing a tuple
                raise ValueError("%s lbrynet may already be running on your computer." % str(e))
        return defer.succeed(True)

    def _start_reflector(self):
        if self.run_reflector_server:
            log.info("Starting reflector server")
            if self.reflector_port is not None:
                reflector_factory = reflector_server_factory(
                    self.session.peer_manager,
                    self.session.blob_manager,
                    self.stream_info_manager
                )
                try:
                    self.reflector_server_port = reactor.listenTCP(self.reflector_port,
                                                                   reflector_factory)
                    log.info('Started reflector on port %s', self.reflector_port)
                except error.CannotListenError as e:
                    log.exception("Couldn't bind reflector to port %d", self.reflector_port)
                    raise ValueError(
                        "{} lbrynet may already be running on your computer.".format(e))
        return defer.succeed(True)

    def _stop_reflector(self):
        if self.run_reflector_server:
            log.info("Stopping reflector server")
            try:
                if self.reflector_server_port is not None:
                    self.reflector_server_port, p = None, self.reflector_server_port
                    return defer.maybeDeferred(p.stopListening)
            except AttributeError:
                return defer.succeed(True)
        return defer.succeed(True)

    def _stop_file_manager(self):
        if self.lbry_file_manager:
            self.lbry_file_manager.stop()
        return defer.succeed(True)

    def _stop_server(self):
        try:
            if self.lbry_server_port is not None:
                self.lbry_server_port, old_port = None, self.lbry_server_port
                log.info('Stop listening on port %s', old_port.port)
                return defer.maybeDeferred(old_port.stopListening)
            else:
                return defer.succeed(True)
        except AttributeError:
            return defer.succeed(True)

    def _setup_server(self):
        self.startup_status = STARTUP_STAGES[4]
        d = self._start_server()
        d.addCallback(lambda _: self._start_reflector())
        return d

    def _setup_query_handlers(self):
        handlers = [
            BlobRequestHandlerFactory(
                self.session.blob_manager,
                self.session.wallet,
                self.session.payment_rate_manager,
                self.analytics_manager
            ),
            self.session.wallet.get_wallet_info_query_handler_factory(),
        ]
        return self._add_query_handlers(handlers)

    def _add_query_handlers(self, query_handlers):
        for handler in query_handlers:
            query_id = handler.get_primary_query_identifier()
            self.query_handlers[query_id] = handler
        return defer.succeed(None)

    def _clean_up_temp_files(self):
        for path in self.uploaded_temp_files:
            try:
                log.debug('Removing tmp file: %s', path)
                os.remove(path)
            except OSError:
                pass

    @staticmethod
    def _already_shutting_down(sig_num, frame):
        log.info("Already shutting down")

    def _stop_streams(self):
        """stop pending GetStream downloads"""
        for claim_id, stream in self.streams.iteritems():
            stream.cancel(reason="daemon shutdown")

    def _shutdown(self):
        # ignore INT/TERM signals once shutdown has started
        signal.signal(signal.SIGINT, self._already_shutting_down)
        signal.signal(signal.SIGTERM, self._already_shutting_down)

        log.info("Closing lbrynet session")
        log.info("Status at time of shutdown: " + self.startup_status[0])

        self._stop_streams()

        self.looping_call_manager.shutdown()
        if self.analytics_manager:
            self.analytics_manager.shutdown()

        self._clean_up_temp_files()

        d = self._stop_server()
        d.addErrback(log.fail(), 'Failure while shutting down')
        d.addCallback(lambda _: self._stop_reflector())
        d.addErrback(log.fail(), 'Failure while shutting down')
        d.addCallback(lambda _: self._stop_file_manager())
        d.addErrback(log.fail(), 'Failure while shutting down')
        if self.session is not None:
            d.addCallback(lambda _: self.session.shut_down())
            d.addErrback(log.fail(), 'Failure while shutting down')
        return d

    def _update_settings(self, settings):
        setting_types = {
            'download_directory': str,
            'data_rate': float,
            'download_timeout': int,
            'peer_port': int,
            'max_key_fee': dict,
            'use_upnp': bool,
            'run_reflector_server': bool,
            'cache_time': int,
            'reflect_uploads': bool,
            'share_usage_data': bool,
            'disable_max_key_fee': bool,
            'peer_search_timeout': int,
            'sd_download_timeout': int,
        }

        for key, setting_type in setting_types.iteritems():
            if key in settings:
                if isinstance(settings[key], setting_type):
                    conf.settings.update({key: settings[key]},
                                         data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
                elif setting_type is dict and isinstance(settings[key], (unicode, str)):
                    decoded = json.loads(str(settings[key]))
                    conf.settings.update({key: decoded},
                                         data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
                else:
                    try:
                        converted = setting_type(settings[key])
                        conf.settings.update({key: converted},
                                             data_types=(conf.TYPE_RUNTIME, conf.TYPE_PERSISTED))
                    except Exception as err:
                        log.warning(err.message)
                        log.warning("error converting setting '%s' to type %s from type %s", key,
                                    setting_type, str(type(settings[key])))
        conf.settings.save_conf_file_settings()

        self.data_rate = conf.settings['data_rate']
        self.max_key_fee = conf.settings['max_key_fee']
        self.disable_max_key_fee = conf.settings['disable_max_key_fee']
        self.download_directory = conf.settings['download_directory']
        self.download_timeout = conf.settings['download_timeout']

        return defer.succeed(True)

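    # Illustration (hypothetical payload): _update_settings type-checks each key
    # against setting_types above, JSON-decodes strings for dict-typed settings
    # such as max_key_fee, and persists the result:
    #
    #     self._update_settings({
    #         'download_timeout': 180,
    #         'max_key_fee': '{"currency": "USD", "amount": 50.0}',
    #     })
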
    def _write_db_revision_file(self, version_num):
        with open(self.db_revision_file, mode='w') as db_revision:
            db_revision.write(str(version_num))

    def _setup_data_directory(self):
        old_revision = 1
        self.startup_status = STARTUP_STAGES[1]
        log.info("Loading databases")
        if not os.path.exists(self.download_directory):
            os.mkdir(self.download_directory)
        if not os.path.exists(self.db_dir):
            os.mkdir(self.db_dir)
            self._write_db_revision_file(self.current_db_revision)
            log.debug("Created the db revision file: %s", self.db_revision_file)
        if not os.path.exists(self.blobfile_dir):
            os.mkdir(self.blobfile_dir)
            log.debug("Created the blobfile directory: %s", str(self.blobfile_dir))
        if not os.path.exists(self.db_revision_file):
            log.warning("db_revision file not found. Creating it")
            self._write_db_revision_file(old_revision)

    def _check_db_migration(self):
        old_revision = 1
        if os.path.exists(self.db_revision_file):
            old_revision = int(open(self.db_revision_file).read().strip())

        if old_revision > self.current_db_revision:
            raise Exception('This version of lbrynet is not compatible with the database\n'
                            'Your database is revision %i, expected %i' %
                            (old_revision, self.current_db_revision))

        def update_version_file_and_print_success():
            self._write_db_revision_file(self.current_db_revision)
            log.info("Finished upgrading the databases.")

        if old_revision < self.current_db_revision:
            from lbrynet.db_migrator import dbmigrator
            log.info("Upgrading your databases")
            d = threads.deferToThread(
                dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision)
            d.addCallback(lambda _: update_version_file_and_print_success())
            return d
        return defer.succeed(True)

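    # Worked example of the migration flow above: the revision file holds a single
    # integer and current_db_revision is 4, so a file containing "2" triggers a
    # threaded dbmigrator.migrate_db(db_dir, 2, 4) run and a rewrite of the file,
    # "4" is a no-op, and "5" raises because the database is newer than this code.
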
    @defer.inlineCallbacks
    def _setup_lbry_file_manager(self):
        log.info('Starting to set up file manager')
        self.startup_status = STARTUP_STAGES[3]
        self.stream_info_manager = DBEncryptedFileMetadataManager(self.db_dir)
        yield self.stream_info_manager.setup()
        self.lbry_file_manager = EncryptedFileManager(
            self.session,
            self.stream_info_manager,
            self.sd_identifier,
            download_directory=self.download_directory
        )
        yield self.lbry_file_manager.setup()
        log.info('Done setting up file manager')

    def _get_analytics(self):
        if not self.analytics_manager.is_started:
            self.analytics_manager.start()
            self.analytics_manager.register_repeating_metric(
                analytics.BLOB_BYTES_AVAILABLE,
                AlwaysSend(calculate_available_blob_size, self.session.blob_manager),
                frequency=300
            )

    def _get_session(self):
        def get_wallet():
            if self.wallet_type == LBRYCRD_WALLET:
                raise ValueError('LBRYcrd Wallet is no longer supported')
            elif self.wallet_type == LBRYUM_WALLET:
                log.info("Using lbryum wallet")
                config = {'auto_connect': True}
                if conf.settings['lbryum_wallet_dir']:
                    config['lbryum_path'] = conf.settings['lbryum_wallet_dir']
                storage = SqliteStorage(self.db_dir)
                wallet = LBRYumWallet(storage, config)
                return defer.succeed(wallet)
            elif self.wallet_type == PTC_WALLET:
                log.info("Using PTC wallet")
                from lbrynet.core.PTCWallet import PTCWallet
                return defer.succeed(PTCWallet(self.db_dir))
            else:
                raise ValueError('Wallet Type {} is not valid'.format(self.wallet_type))

        d = get_wallet()

        def create_session(wallet):
            self.session = Session(
                conf.settings['data_rate'],
                db_dir=self.db_dir,
                node_id=self.node_id,
                blob_dir=self.blobfile_dir,
                dht_node_port=self.dht_node_port,
                known_dht_nodes=conf.settings['known_dht_nodes'],
                peer_port=self.peer_port,
                use_upnp=self.use_upnp,
                wallet=wallet,
                is_generous=conf.settings['is_generous_host'],
                external_ip=self.platform['ip']
            )
            self.startup_status = STARTUP_STAGES[2]

        d.addCallback(create_session)
        d.addCallback(lambda _: self.session.setup())
        return d

    def _setup_stream_identifier(self):
        file_saver_factory = EncryptedFileSaverFactory(
            self.session.peer_finder,
            self.session.rate_limiter,
            self.session.blob_manager,
            self.stream_info_manager,
            self.session.wallet,
            self.download_directory
        )
        self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType,
                                                         file_saver_factory)
        return defer.succeed(None)

    def _download_blob(self, blob_hash, rate_manager=None, timeout=None):
        """
        Download a blob

        :param blob_hash (str): blob hash
        :param rate_manager (PaymentRateManager), optional: the payment rate manager to use,
            defaults to session.payment_rate_manager
        :param timeout (int): blob timeout
        :return: BlobFile
        """
        if not blob_hash:
            raise Exception("Nothing to download")

        rate_manager = rate_manager or self.session.payment_rate_manager
        timeout = timeout or 30
        return download_sd_blob(self.session, blob_hash, rate_manager, timeout)

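    # Illustration (hypothetical hash value): fetching a stream's sd blob through
    # the helper above, with the default rate manager and 30 second timeout:
    #
    #     d = self._download_blob('6f3a...aa7')  # 96-character hex sd blob hash
    #     d.addCallback(lambda blob: log.info("downloaded sd blob %s", blob))
    #
    # The call shape matches _download_blob's signature above; the hash shown is a
    # placeholder, not a real blob.
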
    @defer.inlineCallbacks
    def _get_stream_analytics_report(self, claim_dict):
        sd_hash = claim_dict.source_hash
        try:
            stream_hash = yield self.stream_info_manager.get_stream_hash_for_sd_hash(sd_hash)
        except Exception:
            stream_hash = None
        report = {
            "sd_hash": sd_hash,
            "stream_hash": stream_hash,
        }
        blobs = {}
        try:
            sd_host = yield self.session.blob_manager.get_host_downloaded_from(sd_hash)
        except Exception:
            sd_host = None
        report["sd_blob"] = sd_host
        if stream_hash:
            blob_infos = yield self.stream_info_manager.get_blobs_for_stream(stream_hash)
            report["known_blobs"] = len(blob_infos)
        else:
            blob_infos = []
            report["known_blobs"] = 0
        # for blob_hash, blob_num, iv, length in blob_infos:
        #     try:
        #         host = yield self.session.blob_manager.get_host_downloaded_from(blob_hash)
        #     except Exception:
        #         host = None
        #     if host:
        #         blobs[blob_num] = host
        # report["blobs"] = json.dumps(blobs)
        defer.returnValue(report)

    @defer.inlineCallbacks
    def _download_name(self, name, claim_dict, claim_id, timeout=None, file_name=None):
        """
        Add a lbry file to the file manager, start the download, and return the new lbry file.
        If it already exists in the file manager, return the existing lbry file
        """

        @defer.inlineCallbacks
        def _download_finished(download_id, name, claim_dict):
            report = yield self._get_stream_analytics_report(claim_dict)
            self.analytics_manager.send_download_finished(download_id, name, report, claim_dict)

        @defer.inlineCallbacks
        def _download_failed(error, download_id, name, claim_dict):
            report = yield self._get_stream_analytics_report(claim_dict)
            self.analytics_manager.send_download_errored(error, download_id, name, claim_dict,
                                                         report)

        if claim_id in self.streams:
            downloader = self.streams[claim_id]
            result = yield downloader.finished_deferred
            defer.returnValue(result)
        else:
            download_id = utils.random_string()
            self.analytics_manager.send_download_started(download_id, name, claim_dict)

            self.streams[claim_id] = GetStream(self.sd_identifier, self.session,
                                               self.exchange_rate_manager, self.max_key_fee,
                                               self.disable_max_key_fee,
                                               conf.settings['data_rate'], timeout,
                                               file_name)
            try:
                lbry_file, finished_deferred = yield self.streams[claim_id].start(claim_dict, name)
                finished_deferred.addCallbacks(lambda _: _download_finished(download_id, name,
                                                                            claim_dict),
                                               lambda e: _download_failed(e, download_id, name,
                                                                          claim_dict))

                result = yield self._get_lbry_file_dict(lbry_file, full_status=True)
            except Exception as err:
                yield _download_failed(err, download_id, name, claim_dict)
                if isinstance(err, (DownloadDataTimeout, DownloadSDTimeout)):
                    log.warning('Failed to get %s (%s)', name, err)
                else:
                    log.error('Failed to get %s (%s)', name, err)
                if self.streams[claim_id].downloader:
                    yield self.streams[claim_id].downloader.stop(err)
                result = {'error': err.message}
            finally:
                del self.streams[claim_id]
            defer.returnValue(result)

    @defer.inlineCallbacks
    def _publish_stream(self, name, bid, claim_dict, file_path=None, certificate_id=None,
                        claim_address=None, change_address=None):

        publisher = Publisher(self.session, self.lbry_file_manager, self.session.wallet,
                              certificate_id)
        parse_lbry_uri(name)
        if bid <= 0.0:
            raise Exception("Invalid bid")
        if not file_path:
            claim_out = yield publisher.publish_stream(name, bid, claim_dict, claim_address,
                                                       change_address)
        else:
            claim_out = yield publisher.create_and_publish_stream(name, bid, claim_dict, file_path,
                                                                  claim_address, change_address)
            if conf.settings['reflect_uploads']:
                d = reupload.reflect_stream(publisher.lbry_file)
                d.addCallbacks(lambda _: log.info("Reflected new publication to lbry://%s", name),
                               log.exception)
        self.analytics_manager.send_claim_action('publish')
        log.info("Success! Published to lbry://%s txid: %s nout: %d", name, claim_out['txid'],
                 claim_out['nout'])
        defer.returnValue(claim_out)

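    # Illustration (hypothetical arguments): publishing a local file to a name.
    # The claim_out returned above carries at least 'txid' and 'nout', as used by
    # the success log line:
    #
    #     claim_out = yield self._publish_stream('myname', 1.0, claim_dict,
    #                                            file_path='/path/to/file')
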
    def _get_long_count_timestamp(self):
        dt = utils.utcnow() - utils.datetime_obj(year=2012, month=12, day=21)
        return int(dt.total_seconds())

    @defer.inlineCallbacks
    def _resolve_name(self, name, force_refresh=False):
        """Resolves a name. Checks the cache first before going out to the blockchain.

        Args:
            name: the lbry://<name> to resolve
            force_refresh: if True, always go out to the blockchain to resolve.
        """

        parsed = parse_lbry_uri(name)
        resolution = yield self.session.wallet.resolve(parsed.name, check_cache=not force_refresh)
        if parsed.name in resolution:
            result = resolution[parsed.name]
            defer.returnValue(result)

    def _get_or_download_sd_blob(self, blob, sd_hash):
        if blob:
            return self.session.blob_manager.get_blob(blob[0])

        def _check_est(downloader):
            if downloader.result is not None:
                downloader.cancel()

        d = defer.succeed(None)
        reactor.callLater(self.search_timeout, _check_est, d)
        d.addCallback(
            lambda _: download_sd_blob(
                self.session, sd_hash, self.session.payment_rate_manager))
        return d

    def get_or_download_sd_blob(self, sd_hash):
        """Return previously downloaded sd blob if already in the blob
        manager, otherwise download and return it
        """
        d = self.session.blob_manager.completed_blobs([sd_hash])
        d.addCallback(self._get_or_download_sd_blob, sd_hash)
        return d

    def get_size_from_sd_blob(self, sd_blob):
        """
        Get total stream size in bytes from a sd blob
        """

        d = self.sd_identifier.get_metadata_for_sd_blob(sd_blob)
        d.addCallback(lambda metadata: metadata.validator.info_to_show())
        d.addCallback(lambda info: int(dict(info)['stream_size']))
        return d

    def _get_est_cost_from_stream_size(self, size):
        """
        Calculate estimated LBC cost for a stream given its size in bytes
        """

        if self.session.payment_rate_manager.generous:
            return 0.0
        return size / (10 ** 6) * conf.settings['data_rate']

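    # Worked example for the estimate above: with a non-generous payment rate
    # manager and a hypothetical data_rate of 0.0001 LBC per MB, a 25 MB stream
    # costs 25000000 / (10 ** 6) * 0.0001 = 0.0025 LBC before any key fee.
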
    @defer.inlineCallbacks
    def get_est_cost_using_known_size(self, uri, size):
        """
        Calculate estimated LBC cost for a stream given its size in bytes
        """

        cost = self._get_est_cost_from_stream_size(size)

        resolved = yield self.session.wallet.resolve(uri)

        if uri in resolved and 'claim' in resolved[uri]:
            claim = ClaimDict.load_dict(resolved[uri]['claim']['value'])
            final_fee = self._add_key_fee_to_est_data_cost(claim.source_fee, cost)
            result = yield self._render_response(final_fee)
            defer.returnValue(result)
        else:
            defer.returnValue(None)

    def get_est_cost_from_sd_hash(self, sd_hash):
        """
        Get estimated cost from a sd hash
        """

        d = self.get_or_download_sd_blob(sd_hash)
        d.addCallback(self.get_size_from_sd_blob)
        d.addCallback(self._get_est_cost_from_stream_size)
        return d

    def _get_est_cost_from_metadata(self, metadata, name):
        d = self.get_est_cost_from_sd_hash(metadata.source_hash)

        def _handle_err(err):
            if isinstance(err, Failure):
                log.warning(
                    "Timeout getting blob for cost est for lbry://%s, using only key fee", name)
                return 0.0
            raise err

        d.addErrback(_handle_err)
        d.addCallback(lambda data_cost: self._add_key_fee_to_est_data_cost(metadata.source_fee,
                                                                           data_cost))
        return d

    def _add_key_fee_to_est_data_cost(self, fee, data_cost):
        fee_amount = 0.0 if not fee else self.exchange_rate_manager.convert_currency(fee.currency,
                                                                                     "LBC",
                                                                                     fee.amount)
        return data_cost + fee_amount

    @defer.inlineCallbacks
    def get_est_cost_from_uri(self, uri):
        """
        Resolve a name and return the estimated stream cost
        """

        resolved = yield self.session.wallet.resolve(uri)
        if resolved:
            claim_response = resolved[uri]
        else:
            claim_response = None

        result = None
        if claim_response and 'claim' in claim_response:
            if 'value' in claim_response['claim'] and claim_response['claim']['value'] is not None:
                claim_value = ClaimDict.load_dict(claim_response['claim']['value'])
                cost = yield self._get_est_cost_from_metadata(claim_value, uri)
                result = round(cost, 5)
            else:
                log.warning("Failed to estimate cost for %s", uri)
        defer.returnValue(result)

    def get_est_cost(self, uri, size=None):
        """Get a cost estimate for a lbry stream, if size is not provided the
        sd blob will be downloaded to determine the stream size
        """

        if size is not None:
            return self.get_est_cost_using_known_size(uri, size)
        return self.get_est_cost_from_uri(uri)

    @defer.inlineCallbacks
    def _get_lbry_file_dict(self, lbry_file, full_status=False):
        key = binascii.b2a_hex(lbry_file.key) if lbry_file.key else None
        full_path = os.path.join(lbry_file.download_directory, lbry_file.file_name)
        mime_type = mimetypes.guess_type(full_path)[0]
        if os.path.isfile(full_path):
            with open(full_path) as written_file:
                written_file.seek(0, os.SEEK_END)
                written_bytes = written_file.tell()
        else:
            written_bytes = 0

        if full_status:
            size = yield lbry_file.get_total_bytes()
            file_status = yield lbry_file.status()
            message = STREAM_STAGES[2][1] % (file_status.name, file_status.num_completed,
                                             file_status.num_known, file_status.running_status)
        else:
            size = None
            message = None

        claim = yield self.session.wallet.get_claim_by_claim_id(lbry_file.claim_id,
                                                                check_expire=False)

        if claim and 'value' in claim:
            metadata = claim['value']
        else:
            metadata = None

        if claim and 'channel_name' in claim:
            channel_name = claim['channel_name']
        else:
            channel_name = None

        if lbry_file.txid and lbry_file.nout is not None:
            outpoint = repr(ClaimOutpoint(lbry_file.txid, lbry_file.nout))
        else:
            outpoint = None

        if claim and 'has_signature' in claim:
            has_signature = claim['has_signature']
        else:
            has_signature = None
        if claim and 'signature_is_valid' in claim:
            signature_is_valid = claim['signature_is_valid']
        else:
            signature_is_valid = None

        result = {
            'completed': lbry_file.completed,
            'file_name': lbry_file.file_name,
            'download_directory': lbry_file.download_directory,
            'points_paid': lbry_file.points_paid,
            'stopped': lbry_file.stopped,
            'stream_hash': lbry_file.stream_hash,
            'stream_name': lbry_file.stream_name,
            'suggested_file_name': lbry_file.suggested_file_name,
            'sd_hash': lbry_file.sd_hash,
            'name': lbry_file.name,
            'outpoint': outpoint,
            'claim_id': lbry_file.claim_id,
            'download_path': full_path,
            'mime_type': mime_type,
            'key': key,
            'total_bytes': size,
            'written_bytes': written_bytes,
            'message': message,
            'metadata': metadata
        }
        if channel_name is not None:
            result['channel_name'] = channel_name
        if has_signature is not None:
            result['has_signature'] = has_signature
        if signature_is_valid is not None:
            result['signature_is_valid'] = signature_is_valid
        defer.returnValue(result)

    @defer.inlineCallbacks
    def _get_lbry_file(self, search_by, val, return_json=False, full_status=False):
        lbry_file = None
        if search_by in FileID:
            for l_f in self.lbry_file_manager.lbry_files:
                if l_f.__dict__.get(search_by) == val:
                    lbry_file = l_f
                    break
        else:
            raise NoValidSearch('{} is not a valid search operation'.format(search_by))
        if return_json and lbry_file:
            lbry_file = yield self._get_lbry_file_dict(lbry_file, full_status=full_status)
        defer.returnValue(lbry_file)

    @defer.inlineCallbacks
    def _get_lbry_files(self, return_json=False, full_status=False, **kwargs):
        lbry_files = list(self.lbry_file_manager.lbry_files)
        if kwargs:
            # iter_lbry_file_search_values is defined elsewhere in this module
            # (not shown in this excerpt)
            for search_type, value in iter_lbry_file_search_values(kwargs):
                lbry_files = [l_f for l_f in lbry_files if l_f.__dict__[search_type] == value]
        if return_json:
            file_dicts = []
            for lbry_file in lbry_files:
                lbry_file_dict = yield self._get_lbry_file_dict(lbry_file, full_status=full_status)
                file_dicts.append(lbry_file_dict)
            lbry_files = file_dicts
        log.debug("Collected %i lbry files", len(lbry_files))
        defer.returnValue(lbry_files)

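    # Illustration (placeholder identifiers): both helpers above filter on the
    # attribute names enumerated in FileID, e.g.:
    #
    #     lbry_file = yield self._get_lbry_file('sd_hash', some_sd_hash)
    #     downloads = yield self._get_lbry_files(return_json=True, claim_id=some_claim_id)
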
    # TODO: do this and get_blobs_for_sd_hash in the stream info manager
    def get_blobs_for_stream_hash(self, stream_hash):
        def _iter_blobs(blob_hashes):
            for blob_hash, blob_num, blob_iv, blob_length in blob_hashes:
                if blob_hash:
                    yield self.session.blob_manager.get_blob(blob_hash, length=blob_length)

        def _get_blobs(blob_hashes):
            dl = defer.DeferredList(list(_iter_blobs(blob_hashes)), consumeErrors=True)
            dl.addCallback(lambda blobs: [blob[1] for blob in blobs if blob[0]])
            return dl

        d = self.stream_info_manager.get_blobs_for_stream(stream_hash)
        d.addCallback(_get_blobs)
        return d

    def get_blobs_for_sd_hash(self, sd_hash):
        d = self.stream_info_manager.get_stream_hash_for_sd_hash(sd_hash)
        d.addCallback(self.get_blobs_for_stream_hash)
        return d

    ############################################################################
    #                                                                          #
    #                    JSON-RPC API methods start here                       #
    #                                                                          #
    ############################################################################

2017-01-18 16:36:01 +01:00
|
|
|
@defer.inlineCallbacks
|
2017-05-28 21:59:17 +02:00
|
|
|
@AuthJSONRPCServer.flags(session_status="-s", dht_status="-d")
|
2017-05-25 20:01:39 +02:00
|
|
|
def jsonrpc_status(self, session_status=False, dht_status=False):
|
2016-03-24 03:27:48 +01:00
|
|
|
"""
|
2017-05-28 22:01:53 +02:00
|
|
|
Get daemon status
|
2016-04-13 20:47:34 +02:00
|
|
|
|
2017-05-28 22:01:53 +02:00
|
|
|
Usage:
|
|
|
|
status [-s] [-d]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
-s : include session status in results
|
|
|
|
-d : include dht network and peer status
|
2017-06-12 22:19:26 +02:00
|
|
|
|
|
|
|
Returns:
|
|
|
|
(dict) lbrynet-daemon status
|
|
|
|
{
|
|
|
|
'lbry_id': lbry peer id, base58
|
|
|
|
'installation_id': installation id, base58
|
|
|
|
'is_running': bool
|
|
|
|
'is_first_run': bool
|
|
|
|
'startup_status': {
|
|
|
|
'code': status code
|
|
|
|
'message': status message
|
|
|
|
},
|
|
|
|
'connection_status': {
|
|
|
|
'code': connection status code
|
|
|
|
'message': connection status message
|
|
|
|
},
|
|
|
|
'blockchain_status': {
|
|
|
|
'blocks': local blockchain height,
|
|
|
|
'blocks_behind': remote_height - local_height,
|
|
|
|
'best_blockhash': block hash of most recent block,
|
|
|
|
},
|
|
|
|
|
|
|
|
If given the session status option:
|
|
|
|
'session_status': {
|
|
|
|
'managed_blobs': count of blobs in the blob manager,
|
|
|
|
'managed_streams': count of streams in the file manager
|
2017-10-05 19:59:27 +02:00
|
|
|
'announce_queue_size': number of blobs currently queued to be announced
|
|
|
|
'should_announce_blobs': number of blobs that should be announced
|
2017-06-12 22:19:26 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
If given the dht status option:
|
|
|
|
'dht_status': {
|
|
|
|
'kbps_received': current kbps receiving,
|
|
|
|
'kbps_sent': current kdps being sent,
|
|
|
|
'total_bytes_sent': total bytes sent
|
|
|
|
'total_bytes_received': total bytes received
|
|
|
|
'queries_received': number of queries received per second
|
|
|
|
'queries_sent': number of queries sent per second
|
|
|
|
'recent_contacts': count of recently contacted peers
|
|
|
|
'unique_contacts': count of unique peers
|
|
|
|
}
|
|
|
|
}
|
2017-01-03 20:13:01 +01:00
|
|
|
"""

        # on startup, the wallet or network won't be available but we still need this call to work
        has_wallet = self.session and self.session.wallet and self.session.wallet.network
        local_height = self.session.wallet.network.get_local_height() if has_wallet else 0
        remote_height = self.session.wallet.network.get_server_height() if has_wallet else 0
        best_hash = (yield self.session.wallet.get_best_blockhash()) if has_wallet else None

        response = {
            'lbry_id': base58.b58encode(self.node_id),
            'installation_id': conf.settings.installation_id,
            'is_running': self.announced_startup,
            'is_first_run': self.session.wallet.is_first_run if has_wallet else None,
            'startup_status': {
                'code': self.startup_status[0],
                'message': self.startup_status[1],
            },
            'connection_status': {
                'code': self.connection_status_code,
                'message': (
                    CONNECTION_MESSAGES[self.connection_status_code]
                    if self.connection_status_code is not None
                    else ''
                ),
            },
            'blocks_behind': remote_height - local_height,  # deprecated. remove from UI, then here
            'blockchain_status': {
                'blocks': local_height,
                'blocks_behind': remote_height - local_height,
                'best_blockhash': best_hash,
            }
        }
        if session_status:
            blobs = yield self.session.blob_manager.get_all_verified_blobs()
            announce_queue_size = self.session.hash_announcer.hash_queue_size()
            should_announce_blobs = yield self.session.blob_manager.count_should_announce_blobs()
            response['session_status'] = {
                'managed_blobs': len(blobs),
                'managed_streams': len(self.lbry_file_manager.lbry_files),
                'announce_queue_size': announce_queue_size,
                'should_announce_blobs': should_announce_blobs,
            }
        if dht_status:
            response['dht_status'] = self.session.dht_node.get_bandwidth_stats()
        defer.returnValue(response)
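
    # A minimal client-side sketch of calling this endpoint over JSON-RPC. The
    # daemon URL/port and payload shape below are assumptions for illustration,
    # not values defined in this module:
    #
    #   import requests
    #   status = requests.post('http://localhost:5279',
    #                          json={'method': 'status',
    #                                'params': {'session_status': True}}).json()
    #   print(status['result']['blockchain_status']['blocks_behind'])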

    def jsonrpc_version(self):
        """
        Get lbry version information

        Usage:
            version

        Returns:
            (dict) Dictionary of lbry version information
            {
                'build': (str) build type (e.g. "dev", "rc", "release"),
                'ip': (str) remote ip, if available,
                'lbrynet_version': (str) lbrynet_version,
                'lbryum_version': (str) lbryum_version,
                'lbryschema_version': (str) lbryschema_version,
                'os_release': (str) os release string,
                'os_system': (str) os name,
                'platform': (str) platform string,
                'processor': (str) processor type,
                'python_version': (str) python version,
            }
        """

        platform_info = self._get_platform()
        log.info("Get version info: " + json.dumps(platform_info))
        return self._render_response(platform_info)

    def jsonrpc_report_bug(self, message=None):
        """
        Report a bug to slack

        Usage:
            report_bug (<message> | --message=<message>)

        Returns:
            (bool) true if successful
        """

        platform_name = self._get_platform()['platform']
        report_bug_to_slack(
            message,
            conf.settings.installation_id,
            platform_name,
            get_lbrynet_version()
        )
        return self._render_response(True)

    def jsonrpc_settings_get(self):
        """
        Get daemon settings

        Usage:
            settings_get

        Returns:
            (dict) Dictionary of daemon settings
            See ADJUSTABLE_SETTINGS in lbrynet/conf.py for full list of settings
        """
        return self._render_response(conf.settings.get_adjustable_settings_dict())

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_settings_set(self, **kwargs):
        """
        Set daemon settings

        Usage:
            settings_set [<download_directory> | --download_directory=<download_directory>]
                         [<data_rate> | --data_rate=<data_rate>]
                         [<download_timeout> | --download_timeout=<download_timeout>]
                         [<peer_port> | --peer_port=<peer_port>]
                         [<max_key_fee> | --max_key_fee=<max_key_fee>]
                         [<disable_max_key_fee> | --disable_max_key_fee=<disable_max_key_fee>]
                         [<use_upnp> | --use_upnp=<use_upnp>]
                         [<run_reflector_server> | --run_reflector_server=<run_reflector_server>]
                         [<cache_time> | --cache_time=<cache_time>]
                         [<reflect_uploads> | --reflect_uploads=<reflect_uploads>]
                         [<share_usage_data> | --share_usage_data=<share_usage_data>]
                         [<peer_search_timeout> | --peer_search_timeout=<peer_search_timeout>]
                         [<sd_download_timeout> | --sd_download_timeout=<sd_download_timeout>]

        Options:
            <download_directory>, --download_directory=<download_directory> : (str)
            <data_rate>, --data_rate=<data_rate> : (float), 0.0001
            <download_timeout>, --download_timeout=<download_timeout> : (int), 180
            <peer_port>, --peer_port=<peer_port> : (int), 3333
            <max_key_fee>, --max_key_fee=<max_key_fee> : (dict) maximum key fee for downloads,
                                                          in the format: {
                                                              "currency": <currency_symbol>,
                                                              "amount": <amount>
                                                          }. In the CLI, it must be an escaped
                                                          JSON string.
                                                          Supported currency symbols:
                                                              LBC
                                                              BTC
                                                              USD
            <disable_max_key_fee>, --disable_max_key_fee=<disable_max_key_fee> : (bool), False
            <use_upnp>, --use_upnp=<use_upnp> : (bool), True
            <run_reflector_server>, --run_reflector_server=<run_reflector_server> : (bool), False
            <cache_time>, --cache_time=<cache_time> : (int), 150
            <reflect_uploads>, --reflect_uploads=<reflect_uploads> : (bool), True
            <share_usage_data>, --share_usage_data=<share_usage_data> : (bool), True
            <peer_search_timeout>, --peer_search_timeout=<peer_search_timeout> : (int), 3
            <sd_download_timeout>, --sd_download_timeout=<sd_download_timeout> : (int), 3

        Returns:
            (dict) Updated dictionary of daemon settings
        """

        yield self._update_settings(kwargs)
        defer.returnValue(conf.settings.get_adjustable_settings_dict())
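
    # A hypothetical CLI invocation for illustration; the detail worth noting is
    # that max_key_fee must be passed as an escaped JSON string, per the
    # docstring above:
    #
    #   lbrynet-cli settings_set --download_timeout=240 \
    #       --max_key_fee='{"currency": "USD", "amount": 50.0}'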

    def jsonrpc_help(self, command=None):
        """
        Return a useful message for an API command

        Usage:
            help [<command> | --command=<command>]

        Options:
            <command>, --command=<command> : command to retrieve documentation for
        """

        if command is None:
            return self._render_response({
                'about': 'This is the LBRY JSON-RPC API',
                'command_help': 'Pass a `command` parameter to this method to see ' +
                                'help for that command (e.g. `help command=resolve_name`)',
                'command_list': 'Get a full list of commands using the `commands` method',
                'more_info': 'Visit https://lbry.io/api for more info',
            })

        fn = self.callable_methods.get(command)
        if fn is None:
            raise Exception(
                "No help available for '{}'. It is not a valid command.".format(command)
            )

        return self._render_response({
            'help': textwrap.dedent(fn.__doc__ or '')
        })

    def jsonrpc_commands(self):
        """
        Return a list of available commands

        Usage:
            commands

        Returns:
            (list) list of available commands
        """
        return self._render_response(sorted(self.callable_methods.keys()))

    @AuthJSONRPCServer.flags(include_unconfirmed='-u')
    def jsonrpc_wallet_balance(self, address=None, include_unconfirmed=False):
        """
        Return the balance of the wallet

        Usage:
            wallet_balance [<address> | --address=<address>] [-u]

        Options:
            <address> : If provided only the balance for this address will be given
            -u        : Include unconfirmed

        Returns:
            (float) amount of lbry credits in wallet
        """
        if address is None:
            return self._render_response(float(self.session.wallet.get_balance()))
        else:
            return self._render_response(float(
                self.session.wallet.get_address_balance(address, include_unconfirmed)))
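
    # Hypothetical CLI invocations for illustration; the first form returns the
    # whole wallet balance, the second restricts it to one address and includes
    # unconfirmed outputs:
    #
    #   lbrynet-cli wallet_balance
    #   lbrynet-cli wallet_balance --address=<address> -u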

    @defer.inlineCallbacks
    def jsonrpc_daemon_stop(self):
        """
        Stop lbrynet-daemon

        Usage:
            daemon_stop

        Returns:
            (string) Shutdown message
        """

        log.info("Shutting down lbrynet daemon")
        response = yield self._render_response("Shutting down")
        reactor.callLater(0.1, reactor.fireSystemEvent, "shutdown")
        defer.returnValue(response)

    @defer.inlineCallbacks
    @AuthJSONRPCServer.flags(full_status='-f')
    def jsonrpc_file_list(self, **kwargs):
        """
        List files limited by optional filters

        Usage:
            file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
                      [--claim_id=<claim_id>] [--outpoint=<outpoint>] [--rowid=<rowid>]
                      [--name=<name>]
                      [-f]

        Options:
            --sd_hash=<sd_hash>          : get file with matching sd hash
            --file_name=<file_name>      : get file with matching file name in the
                                           downloads folder
            --stream_hash=<stream_hash>  : get file with matching stream hash
            --claim_id=<claim_id>        : get file with matching claim id
            --outpoint=<outpoint>        : get file with matching claim outpoint
            --rowid=<rowid>              : get file with matching row id
            --name=<name>                : get file with matching associated name claim
            -f                           : full status, populate the 'message' and 'size' fields

        Returns:
            (list) List of files

            [
                {
                    'completed': (bool) true if download is completed,
                    'file_name': (str) name of file,
                    'download_directory': (str) download directory,
                    'points_paid': (float) credit paid to download file,
                    'stopped': (bool) true if download is stopped,
                    'stream_hash': (str) stream hash of file,
                    'stream_name': (str) stream name,
                    'suggested_file_name': (str) suggested file name,
                    'sd_hash': (str) sd hash of file,
                    'name': (str) name claim attached to file,
                    'outpoint': (str) claim outpoint attached to file,
                    'claim_id': (str) claim ID attached to file,
                    'download_path': (str) download path of file,
                    'mime_type': (str) mime type of file,
                    'key': (str) key attached to file,
                    'total_bytes': (int) file size in bytes, None if full_status is false,
                    'written_bytes': (int) written size in bytes,
                    'message': (str), None if full_status is false,
                    'metadata': (dict) Metadata dictionary
                },
            ]
        """

        result = yield self._get_lbry_files(return_json=True, **kwargs)
        response = yield self._render_response(result)
        defer.returnValue(response)
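
    # Hypothetical CLI invocation for illustration; any of the documented filters
    # can be combined, and -f additionally populates the 'message' and 'size'
    # fields of each result:
    #
    #   lbrynet-cli file_list --claim_id=<claim_id> -f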

    @defer.inlineCallbacks
    @AuthJSONRPCServer.flags(force='-f')
    def jsonrpc_resolve_name(self, name, force=False):
        """
        Resolve stream info from a LBRY name

        Usage:
            resolve_name <name> [-f]

        Options:
            -f : force refresh and do not check cache

        Returns:
            (dict) Metadata dictionary from name claim, None if the name is not
                   resolvable
        """

        try:
            metadata = yield self._resolve_name(name, force_refresh=force)
        except UnknownNameError:
            log.info('Name %s is not known', name)
            defer.returnValue(None)
        else:
            defer.returnValue(metadata)

    @defer.inlineCallbacks
    def jsonrpc_claim_show(self, txid=None, nout=None, claim_id=None):
        """
        Resolve claim info from txid/nout or with claim ID

        Usage:
            claim_show [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                       [<claim_id> | --claim_id=<claim_id>]

        Options:
            <txid>, --txid=<txid>              : look for claim with this txid, nout must
                                                 also be specified
            <nout>, --nout=<nout>              : look for claim with this nout, txid must
                                                 also be specified
            <claim_id>, --claim_id=<claim_id>  : look for claim with this claim id

        Returns:
            (dict) Dictionary containing claim info as below,

            {
                'txid': (str) txid of claim,
                'nout': (int) nout of claim,
                'amount': (float) amount of claim,
                'value': (str) value of claim,
                'height': (int) height of claim takeover,
                'claim_id': (str) claim ID of claim,
                'supports': (list) list of supports associated with claim
            }

            if claim cannot be resolved, dictionary as below will be returned

            {
                'error': (str) reason for error
            }
        """
        if claim_id is not None and txid is None and nout is None:
            claim_results = yield self.session.wallet.get_claim_by_claim_id(claim_id)
        elif txid is not None and nout is not None and claim_id is None:
            outpoint = ClaimOutpoint(txid, nout)
            claim_results = yield self.session.wallet.get_claim_by_outpoint(outpoint)
        else:
            raise Exception("Must specify either txid/nout, or claim_id")
        response = yield self._render_response(claim_results)
        defer.returnValue(response)

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    @AuthJSONRPCServer.flags(force='-f')
    def jsonrpc_resolve(self, force=False, uri=None, uris=[]):
        """
        Resolve given LBRY URIs

        Usage:
            resolve [-f] (<uri> | --uri=<uri>) [<uris>...]

        Options:
            -f : force refresh and ignore cache

        Returns:
            Dictionary of results, keyed by uri
            '<uri>': {
                If a resolution error occurs:
                'error': Error message

                If the uri resolves to a channel or a claim in a channel:
                'certificate': {
                    'address': (str) claim address,
                    'amount': (float) claim amount,
                    'effective_amount': (float) claim amount including supports,
                    'claim_id': (str) claim id,
                    'claim_sequence': (int) claim sequence number,
                    'decoded_claim': (bool) whether or not the claim value was decoded,
                    'height': (int) claim height,
                    'depth': (int) claim depth,
                    'has_signature': (bool) included if decoded_claim,
                    'name': (str) claim name,
                    'supports': (list) list of supports [{'txid': (str) txid,
                                                          'nout': (int) nout,
                                                          'amount': (float) amount}],
                    'txid': (str) claim txid,
                    'nout': (str) claim nout,
                    'signature_is_valid': (bool), included if has_signature,
                    'value': ClaimDict if decoded, otherwise hex string
                }

                If the uri resolves to a channel:
                'claims_in_channel': (int) number of claims in the channel,

                If the uri resolves to a claim:
                'claim': {
                    'address': (str) claim address,
                    'amount': (float) claim amount,
                    'effective_amount': (float) claim amount including supports,
                    'claim_id': (str) claim id,
                    'claim_sequence': (int) claim sequence number,
                    'decoded_claim': (bool) whether or not the claim value was decoded,
                    'height': (int) claim height,
                    'depth': (int) claim depth,
                    'has_signature': (bool) included if decoded_claim,
                    'name': (str) claim name,
                    'channel_name': (str) channel name if claim is in a channel,
                    'supports': (list) list of supports [{'txid': (str) txid,
                                                          'nout': (int) nout,
                                                          'amount': (float) amount}],
                    'txid': (str) claim txid,
                    'nout': (str) claim nout,
                    'signature_is_valid': (bool), included if has_signature,
                    'value': ClaimDict if decoded, otherwise hex string
                }
            }
        """

        uris = tuple(uris)
        if uri is not None:
            uris += (uri,)

        results = {}

        valid_uris = tuple()
        for u in uris:
            try:
                parse_lbry_uri(u)
                valid_uris += (u,)
            except URIParseError:
                results[u] = {"error": "%s is not a valid uri" % u}

        resolved = yield self.session.wallet.resolve(*valid_uris, check_cache=not force)

        for resolved_uri in resolved:
            results[resolved_uri] = resolved[resolved_uri]
        response = yield self._render_response(results)
        defer.returnValue(response)
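
    # Hypothetical CLI invocation for illustration; several URIs can be resolved
    # in one call, and an invalid URI produces an 'error' entry for that key
    # rather than failing the whole request:
    #
    #   lbrynet-cli resolve -f lbry://<name> lbry://@<channel>/<name>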

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_get(self, uri, file_name=None, timeout=None):
        """
        Download stream from a LBRY name.

        Usage:
            get <uri> [<file_name> | --file_name=<file_name>] [<timeout> | --timeout=<timeout>]

        Options:
            <file_name> : specified name for the downloaded file
            <timeout>   : download timeout in number of seconds

        Returns:
            (dict) Dictionary containing information about the stream
            {
                'completed': (bool) true if download is completed,
                'file_name': (str) name of file,
                'download_directory': (str) download directory,
                'points_paid': (float) credit paid to download file,
                'stopped': (bool) true if download is stopped,
                'stream_hash': (str) stream hash of file,
                'stream_name': (str) stream name,
                'suggested_file_name': (str) suggested file name,
                'sd_hash': (str) sd hash of file,
                'name': (str) name claim attached to file,
                'outpoint': (str) claim outpoint attached to file,
                'claim_id': (str) claim ID attached to file,
                'download_path': (str) download path of file,
                'mime_type': (str) mime type of file,
                'key': (str) key attached to file,
                'total_bytes': (int) file size in bytes, None if full_status is false,
                'written_bytes': (int) written size in bytes,
                'message': (str), None if full_status is false,
                'metadata': (dict) Metadata dictionary
            }
        """

        timeout = timeout if timeout is not None else self.download_timeout

        resolved_result = yield self.session.wallet.resolve(uri)
        if resolved_result and uri in resolved_result:
            resolved = resolved_result[uri]
        else:
            resolved = None

        if not resolved or 'value' not in resolved:
            if not resolved or 'claim' not in resolved:
                raise Exception(
                    "Failed to resolve stream at lbry://{}".format(uri.replace("lbry://", "")))
            resolved = resolved['claim']

        name = resolved['name']
        claim_id = resolved['claim_id']
        claim_dict = ClaimDict.load_dict(resolved['value'])

        if claim_id in self.streams:
            log.info("Already waiting on lbry://%s to start downloading", name)
            yield self.streams[claim_id].data_downloading_deferred

        lbry_file = yield self._get_lbry_file(FileID.CLAIM_ID, claim_id, return_json=False)

        if lbry_file:
            if not os.path.isfile(os.path.join(lbry_file.download_directory, lbry_file.file_name)):
                log.info("Already have lbry file but missing file in %s, rebuilding it",
                         lbry_file.download_directory)
                yield lbry_file.start()
            else:
                log.info('Already have a file for %s', name)
            result = yield self._get_lbry_file_dict(lbry_file, full_status=True)
        else:
            result = yield self._download_name(name, claim_dict, claim_id, timeout=timeout,
                                               file_name=file_name)
        response = yield self._render_response(result)
        defer.returnValue(response)
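
    # Hypothetical CLI invocation for illustration; <uri> stands in for a real
    # claim and is not filled in here:
    #
    #   lbrynet-cli get <uri> --file_name=saved_copy.mp4 --timeout=60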

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_file_set_status(self, status, **kwargs):
        """
        Start or stop downloading a file

        Usage:
            file_set_status <status> [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                            [--stream_hash=<stream_hash>] [--claim_id=<claim_id>]
                            [--outpoint=<outpoint>] [--rowid=<rowid>]
                            [--name=<name>]

        Options:
            --sd_hash=<sd_hash>          : set status of file with matching sd hash
            --file_name=<file_name>      : set status of file with matching file name in the
                                           downloads folder
            --stream_hash=<stream_hash>  : set status of file with matching stream hash
            --claim_id=<claim_id>        : set status of file with matching claim id
            --outpoint=<outpoint>        : set status of file with matching claim outpoint
            --rowid=<rowid>              : set status of file with matching row id
            --name=<name>                : set status of file with matching associated name claim

        Returns:
            (str) Confirmation message
        """

        if status not in ['start', 'stop']:
            raise Exception('Status must be "start" or "stop".')

        search_type, value = get_lbry_file_search_value(kwargs)
        lbry_file = yield self._get_lbry_file(search_type, value, return_json=False)
        if not lbry_file:
            raise Exception('Unable to find a file for {}:{}'.format(search_type, value))

        if status == 'start' and lbry_file.stopped or status == 'stop' and not lbry_file.stopped:
            yield self.lbry_file_manager.toggle_lbry_file_running(lbry_file)
            msg = "Started downloading file" if status == 'start' else "Stopped downloading file"
        else:
            msg = (
                "File was already being downloaded" if status == 'start'
                else "File was already stopped"
            )
        response = yield self._render_response(msg)
        defer.returnValue(response)
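
    # Hypothetical CLI invocations for illustration; the same filters as
    # file_list select the target file, and status must be "start" or "stop":
    #
    #   lbrynet-cli file_set_status stop --name=<name>
    #   lbrynet-cli file_set_status start --sd_hash=<sd_hash>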

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    @AuthJSONRPCServer.flags(delete_from_download_dir='-f', delete_all='--delete_all')
    def jsonrpc_file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs):
        """
        Delete a LBRY file

        Usage:
            file_delete [-f] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                        [--stream_hash=<stream_hash>] [--claim_id=<claim_id>]
                        [--outpoint=<outpoint>] [--rowid=<rowid>]
                        [--name=<name>]

        Options:
            -f, --delete_from_download_dir : delete file from download directory,
                                             instead of just deleting blobs
            --delete_all                   : if there are multiple matching files,
                                             allow the deletion of multiple files.
                                             Otherwise do not delete anything.
            --sd_hash=<sd_hash>            : delete by file sd hash
            --file_name=<file_name>        : delete by file name in downloads folder
            --stream_hash=<stream_hash>    : delete by file stream hash
            --claim_id=<claim_id>          : delete by file claim id
            --outpoint=<outpoint>          : delete by file claim outpoint
            --rowid=<rowid>                : delete by file row id
            --name=<name>                  : delete by associated name claim of file

        Returns:
            (bool) true if deletion was successful
        """

        lbry_files = yield self._get_lbry_files(return_json=False, **kwargs)

        if len(lbry_files) > 1:
            if not delete_all:
                log.warning("There are %i files to delete, use narrower filters to select one",
                            len(lbry_files))
                response = yield self._render_response(False)
                defer.returnValue(response)
            else:
                log.warning("Deleting %i files", len(lbry_files))

        if not lbry_files:
            log.warning("There is no file to delete")
            result = False
        else:
            for lbry_file in lbry_files:
                file_name, stream_hash = lbry_file.file_name, lbry_file.stream_hash
                if lbry_file.claim_id in self.streams:
                    del self.streams[lbry_file.claim_id]
                yield self.lbry_file_manager.delete_lbry_file(
                    lbry_file, delete_file=delete_from_download_dir)
                log.info("Deleted file: %s", file_name)
            result = True

        response = yield self._render_response(result)
        defer.returnValue(response)
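
    # Hypothetical CLI invocation for illustration; -f also removes the file from
    # the download directory, and without --delete_all a filter matching several
    # files deletes nothing:
    #
    #   lbrynet-cli file_delete -f --claim_id=<claim_id>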

    @defer.inlineCallbacks
    def jsonrpc_stream_cost_estimate(self, uri, size=None):
        """
        Get estimated cost for a lbry stream

        Usage:
            stream_cost_estimate <uri> [<size> | --size=<size>]

        Options:
            <size>, --size=<size> : stream size in bytes. if provided an sd blob won't be
                                    downloaded.

        Returns:
            (float) Estimated cost in lbry credits, returns None if uri is not
                    resolvable
        """
        cost = yield self.get_est_cost(uri, size)
        defer.returnValue(cost)

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_channel_new(self, channel_name, amount):
        """
        Generate a publisher key and create a new '@' prefixed certificate claim

        Usage:
            channel_new (<channel_name> | --channel_name=<channel_name>)
                        (<amount> | --amount=<amount>)

        Returns:
            (dict) Dictionary containing result of the claim
            {
                'tx': (str) hex encoded transaction,
                'txid': (str) txid of resulting claim,
                'nout': (int) nout of the resulting claim,
                'fee': (float) fee paid for the claim transaction,
                'claim_id': (str) claim ID of the resulting claim
            }
        """

        try:
            parsed = parse_lbry_uri(channel_name)
            if not parsed.is_channel:
                raise Exception("Cannot make a new channel for a non channel name")
            if parsed.path:
                raise Exception("Invalid channel uri")
        except (TypeError, URIParseError):
            raise Exception("Invalid channel name")
        if amount <= 0:
            raise Exception("Invalid amount")
        if amount > self.session.wallet.get_balance():
            raise InsufficientFundsError()

        result = yield self.session.wallet.claim_new_channel(channel_name, amount)
        self.analytics_manager.send_new_channel()
        log.info("Claimed a new channel! Result: %s", result)
        response = yield self._render_response(result)
        defer.returnValue(response)
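
    # Hypothetical CLI invocation for illustration; channel names must carry the
    # '@' prefix and may not contain a path:
    #
    #   lbrynet-cli channel_new --channel_name=@example --amount=1.0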

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_channel_list_mine(self):
        """
        Get my channels

        Usage:
            channel_list_mine

        Returns:
            (list) ClaimDict
        """

        result = yield self.session.wallet.channel_list()
        response = yield self._render_response(result)
        defer.returnValue(response)

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_channel_export(self, claim_id):
        """
        Export serialized channel signing information for a given certificate claim id

        Usage:
            channel_export (<claim_id> | --claim_id=<claim_id>)

        Returns:
            (str) Serialized certificate information
        """

        result = yield self.session.wallet.export_certificate_info(claim_id)
        defer.returnValue(result)

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_channel_import(self, serialized_certificate_info):
        """
        Import serialized channel signing information (to allow signing new claims to the channel)

        Usage:
            channel_import (<serialized_certificate_info> |
                            --serialized_certificate_info=<serialized_certificate_info>)

        Returns:
            (dict) Result dictionary
        """

        result = yield self.session.wallet.import_certificate_info(serialized_certificate_info)
        defer.returnValue(result)

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_publish(self, name, bid, metadata=None, file_path=None, fee=None, title=None,
                        description=None, author=None, language=None, license=None,
                        license_url=None, thumbnail=None, preview=None, nsfw=None, sources=None,
                        channel_name=None, channel_id=None,
                        claim_address=None, change_address=None):
        """
        Make a new name claim and publish associated data to lbrynet,
        update over existing claim if user already has a claim for name.

        Fields required in the final Metadata are:
            'title'
            'description'
            'author'
            'language'
            'license'
            'nsfw'

        Metadata can be set by either using the metadata argument or by setting individual arguments
        fee, title, description, author, language, license, license_url, thumbnail, preview, nsfw,
        or sources. Individual arguments will overwrite the fields specified in metadata argument.

        Usage:
            publish (<name> | --name=<name>) (<bid> | --bid=<bid>) [--metadata=<metadata>]
                    [--file_path=<file_path>] [--fee=<fee>] [--title=<title>]
                    [--description=<description>] [--author=<author>] [--language=<language>]
                    [--license=<license>] [--license_url=<license_url>] [--thumbnail=<thumbnail>]
                    [--preview=<preview>] [--nsfw=<nsfw>] [--sources=<sources>]
                    [--channel_name=<channel_name>] [--channel_id=<channel_id>]
                    [--claim_address=<claim_address>] [--change_address=<change_address>]

        Options:
            --metadata=<metadata>           : ClaimDict to associate with the claim.
            --file_path=<file_path>         : path to file to be associated with name. If provided,
                                              a lbry stream of this file will be used in 'sources'.
                                              If no path is given but a sources dict is provided,
                                              it will be used. If neither are provided, an
                                              error is raised.
            --fee=<fee>                     : Dictionary representing key fee to download content:
                                               {
                                                 'currency': currency_symbol,
                                                 'amount': float,
                                                 'address': str, optional
                                               }
                                              supported currencies: LBC, USD, BTC
                                              If an address is not provided a new one will be
                                              automatically generated. Default fee is zero.
            --title=<title>                 : title of the publication
            --description=<description>    : description of the publication
            --author=<author>               : author of the publication
            --language=<language>           : language of the publication
            --license=<license>             : publication license
            --license_url=<license_url>     : publication license url
            --thumbnail=<thumbnail>         : thumbnail url
            --preview=<preview>             : preview url
            --nsfw=<nsfw>                   : whether the content is nsfw
            --sources=<sources>             : {'lbry_sd_hash': sd_hash} specifies sd hash of file
            --channel_name=<channel_name>   : name of the publisher channel name in the wallet
            --channel_id=<channel_id>       : claim id of the publisher channel, does not check
                                              for channel claim being in the wallet. This allows
                                              publishing to a channel where only the certificate
                                              private key is in the wallet.
            --claim_address=<claim_address> : address where the claim is sent to, if not specified
                                              a new address will automatically be created
            --change_address=<change_address> : address where the transaction change is sent to,
                                                if not specified a new address will automatically
                                                be created

        Returns:
            (dict) Dictionary containing result of the claim
            {
                'tx': (str) hex encoded transaction,
                'txid': (str) txid of resulting claim,
                'nout': (int) nout of the resulting claim,
                'fee': (float) fee paid for the claim transaction,
                'claim_id': (str) claim ID of the resulting claim
            }
        """

        try:
            parse_lbry_uri(name)
        except (TypeError, URIParseError):
            raise Exception("Invalid name given to publish")

        if bid <= 0.0:
            raise Exception("Invalid bid")

        if bid >= self.session.wallet.get_balance():
            raise InsufficientFundsError('Insufficient funds. '
                                         'Make sure you have enough LBC to deposit')

        metadata = metadata or {}
        if fee is not None:
            metadata['fee'] = fee
        if title is not None:
            metadata['title'] = title
        if description is not None:
            metadata['description'] = description
        if author is not None:
            metadata['author'] = author
        if language is not None:
            metadata['language'] = language
        if license is not None:
            metadata['license'] = license
        if license_url is not None:
            metadata['licenseUrl'] = license_url
        if thumbnail is not None:
            metadata['thumbnail'] = thumbnail
        if preview is not None:
            metadata['preview'] = preview
        if nsfw is not None:
            metadata['nsfw'] = bool(nsfw)

        metadata['version'] = '_0_1_0'

        # check for original deprecated format {'currency':{'address','amount'}}
        # add address, version to fee if unspecified
        if 'fee' in metadata:
            if len(metadata['fee'].keys()) == 1 and isinstance(metadata['fee'].values()[0], dict):
                raise Exception('Old format for fee no longer supported. '
                                'Fee must be specified as {"currency":,"address":,"amount":}')

            if 'amount' in metadata['fee'] and 'currency' in metadata['fee']:
                if not metadata['fee']['amount']:
                    log.warning("Stripping empty fee from published metadata")
                    del metadata['fee']
                elif 'address' not in metadata['fee']:
                    address = yield self.session.wallet.get_unused_address()
                    metadata['fee']['address'] = address
        if 'fee' in metadata and 'version' not in metadata['fee']:
            metadata['fee']['version'] = '_0_0_1'

        claim_dict = {
            'version': '_0_0_1',
            'claimType': 'streamType',
            'stream': {
                'metadata': metadata,
                'version': '_0_0_1'
            }
        }

        # this will be used to verify the format with lbryschema
        claim_copy = deepcopy(claim_dict)
        if sources is not None:
            claim_dict['stream']['source'] = sources
            claim_copy['stream']['source'] = sources
        elif file_path is not None:
            if not os.path.isfile(file_path):
                raise Exception("invalid file path to publish")
            # since the file hasn't yet been made into a stream, we don't have
            # a valid Source for the claim when validating the format, so we'll use a fake one
            claim_copy['stream']['source'] = {
                'version': '_0_0_1',
                'sourceType': 'lbry_sd_hash',
                'source': '0' * 96,
                'contentType': ''
            }
        else:
            # there is no existing source to use, and a file was not provided to make a new one
            raise Exception("no source provided to publish")
        try:
            ClaimDict.load_dict(claim_copy)
            # the metadata to use in the claim can be serialized by lbryschema
        except DecodeError as err:
            # there was a problem with a metadata field, raise an error here rather than
            # waiting to find out when we go to publish the claim (after having made the stream)
            raise Exception("invalid publish metadata: %s" % err.message)

        log.info("Publish: %s", {
            'name': name,
            'file_path': file_path,
            'bid': bid,
            'claim_address': claim_address,
            'change_address': change_address,
            'claim_dict': claim_dict,
        })

        if channel_id:
            certificate_id = channel_id
        elif channel_name:
            certificate_id = None
            my_certificates = yield self.session.wallet.channel_list()
            for certificate in my_certificates:
                if channel_name == certificate['name']:
                    certificate_id = certificate['claim_id']
                    break
            if not certificate_id:
                raise Exception("Cannot publish using channel %s" % channel_name)
        else:
            certificate_id = None

        result = yield self._publish_stream(name, bid, claim_dict, file_path, certificate_id,
                                            claim_address, change_address)
        response = yield self._render_response(result)
        defer.returnValue(response)
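
    # Hypothetical CLI invocation for illustration; every value below is a
    # placeholder, and passing the fee dict as an escaped JSON string is an
    # assumption mirroring how max_key_fee is passed to settings_set:
    #
    #   lbrynet-cli publish <name> --bid=1.0 --file_path=/path/to/file \
    #       --title="A title" --channel_name=@example \
    #       --fee='{"currency": "LBC", "amount": 1.0}'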

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_claim_abandon(self, claim_id=None, txid=None, nout=None):
        """
        Abandon a name and reclaim credits from the claim

        Usage:
            claim_abandon [<claim_id> | --claim_id=<claim_id>]
                          [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]

        Return:
            (dict) Dictionary containing result of the claim
            {
                txid : (str) txid of resulting transaction
                fee : (float) fee paid for the transaction
            }
        """
        if claim_id is None and txid is None and nout is None:
            raise Exception('Must specify claim_id, or txid and nout')
        if txid is None and nout is not None:
            raise Exception('Must specify txid')
        if nout is None and txid is not None:
            raise Exception('Must specify nout')

        result = yield self.session.wallet.abandon_claim(claim_id, txid, nout)
        self.analytics_manager.send_claim_action('abandon')
        defer.returnValue(result)
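
    # Hypothetical CLI invocations for illustration; a claim can be abandoned
    # either by claim id or by the txid/nout pair identifying its outpoint:
    #
    #   lbrynet-cli claim_abandon --claim_id=<claim_id>
    #   lbrynet-cli claim_abandon --txid=<txid> --nout=<nout>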

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_claim_new_support(self, name, claim_id, amount):
        """
        Support a name claim

        Usage:
            claim_new_support (<name> | --name=<name>) (<claim_id> | --claim_id=<claim_id>)
                              (<amount> | --amount=<amount>)

        Return:
            (dict) Dictionary containing result of the claim
            {
                txid : (str) txid of resulting support claim
                nout : (int) nout of the resulting support claim
                fee : (float) fee paid for the transaction
            }
        """

        result = yield self.session.wallet.support_claim(name, claim_id, amount)
        self.analytics_manager.send_claim_action('new_support')
        defer.returnValue(result)

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_claim_send_to_address(self, claim_id, address, amount=None):
        """
        Send a name claim to an address

        Usage:
            claim_send_to_address (<claim_id> | --claim_id=<claim_id>)
                                  (<address> | --address=<address>)
                                  [<amount> | --amount=<amount>]

        Options:
            <amount> : Amount of credits to claim name for, defaults to the current amount
                       on the claim
        """
        result = yield self.session.wallet.send_claim_to_address(claim_id, address, amount)
        response = yield self._render_response(result)
        defer.returnValue(response)

    # TODO: claim_list_mine should be merged into claim_list, but idk how to authenticate it -Grin
    @AuthJSONRPCServer.auth_required
    def jsonrpc_claim_list_mine(self):
        """
        List my name claims

        Usage:
            claim_list_mine

        Returns:
            (list) List of name claims owned by user
            [
                {
                    'address': (str) address that owns the claim,
                    'amount': (float) amount assigned to the claim,
                    'blocks_to_expiration': (int) number of blocks until it expires,
                    'category': (str) "claim", "update", or "support",
                    'claim_id': (str) claim ID of the claim,
                    'confirmations': (int) number of blocks of confirmations for the claim,
                    'expiration_height': (int) the block height which the claim will expire,
                    'expired': (bool) true if expired, false otherwise,
                    'height': (int) height of the block containing the claim,
                    'is_spent': (bool) true if claim is abandoned, false otherwise,
                    'name': (str) name of the claim,
                    'txid': (str) txid of the claim,
                    'nout': (int) nout of the claim,
                    'value': (str) value of the claim
                },
            ]
        """

        d = self.session.wallet.get_name_claims()
        d.addCallback(lambda claims: self._render_response(claims))
        return d

    @defer.inlineCallbacks
    def jsonrpc_claim_list(self, name):
        """
        List current claims and information about them for a given name

        Usage:
            claim_list (<name> | --name=<name>)

        Returns:
            (dict) State of claims assigned for the name
            {
                'claims': (list) list of claims for the name
                [
                    {
                        'amount': (float) amount assigned to the claim,
                        'effective_amount': (float) total amount assigned to the claim,
                                            including supports,
                        'claim_id': (str) claim ID of the claim,
                        'height': (int) height of block containing the claim,
                        'txid': (str) txid of the claim,
                        'nout': (int) nout of the claim,
                        'supports': (list) a list of supports attached to the claim,
                        'value': (str) the value of the claim
                    },
                ]
                'supports_without_claims': (list) supports without any claims attached to them
                'last_takeover_height': (int) the height of last takeover for the name
            }
        """

        claims = yield self.session.wallet.get_claims_for_name(name)
        defer.returnValue(claims)

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_claim_list_by_channel(self, page=0, page_size=10, uri=None, uris=[]):
        """
        Get paginated claims in a channel specified by a channel uri

        Usage:
            claim_list_by_channel (<uri> | --uri=<uri>) [<uris>...] [--page=<page>]
                                  [--page_size=<page_size>]

        Options:
            --page=<page>            : which page of results to return where page 1 is the first
                                       page, defaults to no pages
            --page_size=<page_size>  : number of results in a page, default of 10

        Returns:
            {
                resolved channel uri: {
                    If there was an error:
                    'error': (str) error message

                    'claims_in_channel': the total number of results for the channel,

                    If a page of results was requested:
                    'returned_page': page number returned,
                    'claims_in_channel': [
                        {
                            'absolute_channel_position': (int) claim index number in sorted list of
                                                         claims which assert to be part of the
                                                         channel
                            'address': (str) claim address,
                            'amount': (float) claim amount,
                            'effective_amount': (float) claim amount including supports,
                            'claim_id': (str) claim id,
                            'claim_sequence': (int) claim sequence number,
                            'decoded_claim': (bool) whether or not the claim value was decoded,
                            'height': (int) claim height,
                            'depth': (int) claim depth,
                            'has_signature': (bool) included if decoded_claim,
                            'name': (str) claim name,
                            'supports': (list) list of supports [{'txid': (str) txid,
                                                                  'nout': (int) nout,
                                                                  'amount': (float) amount}],
                            'txid': (str) claim txid,
                            'nout': (str) claim nout,
                            'signature_is_valid': (bool), included if has_signature,
                            'value': ClaimDict if decoded, otherwise hex string
                        }
                    ],
                }
            }
        """

        uris = tuple(uris)
        if uri is not None:
            uris += (uri, )

        results = {}

        valid_uris = tuple()
        for chan_uri in uris:
            try:
                parsed = parse_lbry_uri(chan_uri)
                if not parsed.is_channel:
                    results[chan_uri] = {"error": "%s is not a channel uri" % parsed.name}
                elif parsed.path:
                    results[chan_uri] = {"error": "%s is a claim in a channel" % parsed.path}
                else:
                    valid_uris += (chan_uri, )
            except URIParseError:
                results[chan_uri] = {"error": "%s is not a valid uri" % chan_uri}

        resolved = yield self.session.wallet.resolve(*valid_uris, check_cache=False, page=page,
                                                     page_size=page_size)
        for u in resolved:
            if 'error' in resolved[u]:
                results[u] = resolved[u]
            else:
                results[u] = {
                    'claims_in_channel': resolved[u]['claims_in_channel']
                }
                if page:
                    results[u]['returned_page'] = page
                    results[u]['claims_in_channel'] = resolved[u].get('claims_in_channel', [])

        response = yield self._render_response(results)
        defer.returnValue(response)
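
    # Hypothetical CLI invocation for illustration; without --page only the claim
    # count for each channel is returned, and with it that page of claims is
    # included in the result:
    #
    #   lbrynet-cli claim_list_by_channel lbry://@<channel> --page=1 --page_size=5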

    @AuthJSONRPCServer.auth_required
    @AuthJSONRPCServer.flags(include_tip_info='-t')
    def jsonrpc_transaction_list(self, include_tip_info=False):
        """
        List transactions belonging to wallet

        Usage:
            transaction_list [-t]

        Options:
            -t : Include claim tip information

        Returns:
            (list) List of transactions, where is_tip is null by default,
            and set to a boolean if include_tip_info is true

            {
                "claim_info": (list) claim info if in txn [{"amount": (float) claim amount,
                                                            "claim_id": (str) claim id,
                                                            "claim_name": (str) claim name,
                                                            "nout": (int) nout}],
                "confirmations": (int) number of confirmations for the txn,
                "date": (str) date and time of txn,
                "fee": (float) txn fee,
                "support_info": (list) support info if in txn [{"amount": (float) support amount,
                                                                "claim_id": (str) claim id,
                                                                "claim_name": (str) claim name,
                                                                "is_tip": (null) default,
                                                                (bool) if include_tip_info is true,
                                                                "nout": (int) nout}],
                "timestamp": (int) timestamp,
                "txid": (str) txn id,
                "update_info": (list) update info if in txn [{"amount": (float) updated amount,
                                                              "claim_id": (str) claim id,
                                                              "claim_name": (str) claim name,
                                                              "nout": (int) nout}],
                "value": (float) value of txn
            }
        """

        d = self.session.wallet.get_history(include_tip_info)
        d.addCallback(lambda r: self._render_response(r))
        return d

    def jsonrpc_transaction_show(self, txid):
        """
        Get a decoded transaction from a txid

        Usage:
            transaction_show (<txid> | --txid=<txid>)

        Returns:
            (dict) JSON formatted transaction
        """

        d = self.session.wallet.get_transaction(txid)
        d.addCallback(lambda r: self._render_response(r))
        return d
    @AuthJSONRPCServer.auth_required
    def jsonrpc_wallet_is_address_mine(self, address):
        """
        Checks if an address is associated with the current wallet.

        Usage:
            wallet_is_address_mine (<address> | --address=<address>)

        Returns:
            (bool) true, if address is associated with current wallet
        """

        d = self.session.wallet.address_is_mine(address)
        d.addCallback(lambda is_mine: self._render_response(is_mine))
        return d

    @AuthJSONRPCServer.auth_required
    def jsonrpc_wallet_public_key(self, address):
        """
        Get public key from wallet address

        Usage:
            wallet_public_key (<address> | --address=<address>)

        Returns:
            (list) list of public keys associated with address.
            Could contain more than one public key if multisig.
        """

        d = self.session.wallet.get_pub_keys(address)
        d.addCallback(lambda r: self._render_response(r))
        return d

    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_wallet_list(self):
        """
        List wallet addresses

        Usage:
            wallet_list

        Returns:
            (list) List of wallet addresses
        """

        addresses = yield self.session.wallet.list_addresses()
        response = yield self._render_response(addresses)
        defer.returnValue(response)

    @AuthJSONRPCServer.auth_required
    def jsonrpc_wallet_new_address(self):
        """
        Generate a new wallet address

        Usage:
            wallet_new_address

        Returns:
            (str) New wallet address in base58
        """

        def _disp(address):
            log.info("Got new wallet address: " + address)
            return defer.succeed(address)

        d = self.session.wallet.get_new_address()
        d.addCallback(_disp)
        d.addCallback(lambda address: self._render_response(address))
        return d

    @AuthJSONRPCServer.auth_required
    def jsonrpc_wallet_unused_address(self):
        """
        Return an address containing no balance; a new address is
        created if no unused address exists.

        Usage:
            wallet_unused_address

        Returns:
            (str) Unused wallet address in base58
        """

        def _disp(address):
            log.info("Got unused wallet address: " + address)
            return defer.succeed(address)

        d = self.session.wallet.get_unused_address()
        d.addCallback(_disp)
        d.addCallback(lambda address: self._render_response(address))
        return d

@AuthJSONRPCServer.deprecated("wallet_send")
|
2016-10-19 06:12:44 +02:00
|
|
|
@AuthJSONRPCServer.auth_required
|
2017-04-27 02:02:00 +02:00
|
|
|
@defer.inlineCallbacks
|
2017-01-26 02:06:17 +01:00
|
|
|
def jsonrpc_send_amount_to_address(self, amount, address):
|
2016-05-04 05:13:31 +02:00
|
|
|
"""
|
2017-05-28 22:01:53 +02:00
|
|
|
Queue a payment of credits to an address
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
send_amount_to_address (<amount> | --amount=<amount>) (<address> | --address=<address>)
|
2016-05-04 05:13:31 +02:00
|
|
|
|
2017-03-14 00:14:11 +01:00
|
|
|
Returns:
|
|
|
|
(bool) true if payment successfully scheduled
|
2016-05-04 05:13:31 +02:00
|
|
|
"""
|
|
|
|
|
2017-07-07 22:37:35 +02:00
|
|
|
if amount < 0:
|
|
|
|
raise NegativeFundsError()
|
|
|
|
elif not amount:
|
|
|
|
raise NullFundsError()
|
|
|
|
|
2016-05-04 05:13:31 +02:00
|
|
|
reserved_points = self.session.wallet.reserve_points(address, amount)
|
|
|
|
if reserved_points is None:
|
2017-04-27 02:02:00 +02:00
|
|
|
raise InsufficientFundsError()
|
|
|
|
yield self.session.wallet.send_points_to_address(reserved_points, amount)
|
|
|
|
self.analytics_manager.send_credits_sent()
|
|
|
|
defer.returnValue(True)
|
2016-05-04 05:13:31 +02:00
|
|
|
|
2017-08-15 17:11:38 +02:00
|
|
|
    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_wallet_send(self, amount, address=None, claim_id=None):
        """
        Send credits. If given an address, send credits to it. If given a claim id, send a tip
        to the owner of the claim specified by claim_id. A tip is a claim support where the
        recipient of the support is the claim address for the claim being supported.

        Usage:
            wallet_send (<amount> | --amount=<amount>)
                        ((<address> | --address=<address>) | (<claim_id> | --claim_id=<claim_id>))

        Return:
            If sending to an address:
            (bool) true if payment successfully scheduled

            If sending a claim tip:
            (dict) Dictionary containing the result of the support
            {
                txid : (str) txid of resulting support claim
                nout : (int) nout of the resulting support claim
                fee : (float) fee paid for the transaction
            }
        """

        if address and claim_id:
            raise Exception("Given both an address and a claim id")
        elif not address and not claim_id:
            raise Exception("Not given an address or a claim id")
        if amount < 0:
            raise NegativeFundsError()
        elif not amount:
            raise NullFundsError()

        if address:
            # raises an error if the address is invalid
            decode_address(address)
            result = yield self.jsonrpc_send_amount_to_address(amount, address)
        else:
            validate_claim_id(claim_id)
            result = yield self.session.wallet.tip_claim(claim_id, amount)
            self.analytics_manager.send_claim_action('new_support')
        defer.returnValue(result)

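    # Illustrative client-side sketch (the endpoint url is an assumption for a
    # default local setup): tipping a claim versus paying an address. The
    # method raises if both, or neither, of address and claim_id are given.
    #
    #   import requests
    #   api = 'http://localhost:5279/lbryapi'
    #   requests.post(api, json={'method': 'wallet_send',
    #                            'params': {'amount': 1.0,
    #                                       'claim_id': '<claim id>'}})
    #   requests.post(api, json={'method': 'wallet_send',
    #                            'params': {'amount': 1.0,
    #                                       'address': '<base58 address>'}})
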
    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    @AuthJSONRPCServer.flags(no_broadcast='--no_broadcast')
    def jsonrpc_wallet_prefill_addresses(self, num_addresses, amount, no_broadcast=False):
        """
        Create new addresses, each containing `amount` credits

        Usage:
            wallet_prefill_addresses [--no_broadcast]
                                     (<num_addresses> | --num_addresses=<num_addresses>)
                                     (<amount> | --amount=<amount>)

        Options:
            --no_broadcast : create the transaction without broadcasting it

        Returns:
            (dict) the resulting transaction
        """

        if amount < 0:
            raise NegativeFundsError()
        elif not amount:
            raise NullFundsError()

        broadcast = not no_broadcast
        tx = yield self.session.wallet.create_addresses_with_balance(
            num_addresses, amount, broadcast=broadcast)
        tx['broadcast'] = broadcast
        defer.returnValue(tx)

    @defer.inlineCallbacks
    def jsonrpc_utxo_list(self):
        """
        List unspent transaction outputs

        Usage:
            utxo_list

        Returns:
            (list) List of unspent transaction outputs (UTXOs)
            [
                {
                    "address": (str) the output address
                    "amount": (float) unspent amount
                    "height": (int) block height
                    "is_claim": (bool) is the tx a claim
                    "is_coinbase": (bool) is the tx a coinbase tx
                    "is_support": (bool) is the tx a support
                    "is_update": (bool) is the tx an update
                    "nout": (int) nout of the output
                    "txid": (str) txid of the output
                },
                ...
            ]
        """

        unspent = yield self.session.wallet.list_unspent()
        for i, utxo in enumerate(unspent):
            # normalize the wallet's field names to the keys documented above
            utxo['txid'] = utxo.pop('prevout_hash')
            utxo['nout'] = utxo.pop('prevout_n')
            utxo['amount'] = utxo.pop('value')
            utxo['is_coinbase'] = utxo.pop('coinbase')
            unspent[i] = utxo

        defer.returnValue(unspent)

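    # Illustrative sketch (the endpoint url is an assumption for a default
    # local setup): one way to tally value not tied up in claims or supports
    # from the utxo_list output.
    #
    #   import requests
    #   utxos = requests.post('http://localhost:5279/lbryapi',
    #                         json={'method': 'utxo_list'}).json()['result']
    #   free = sum(u['amount'] for u in utxos
    #              if not (u['is_claim'] or u['is_support'] or u['is_update']))
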
    def jsonrpc_block_show(self, blockhash=None, height=None):
        """
        Get contents of a block

        Usage:
            block_show ((<blockhash> | --blockhash=<blockhash>) | (<height> | --height=<height>))

        Options:
            <blockhash>, --blockhash=<blockhash> : hash of the block to look up
            <height>, --height=<height> : height of the block to look up

        Returns:
            (dict) Requested block
        """

        if blockhash is not None:
            d = self.session.wallet.get_block(blockhash)
        elif height is not None:
            d = self.session.wallet.get_block_info(height)
            d.addCallback(lambda b: self.session.wallet.get_block(b))
        else:
            # TODO: return a useful error message
            return server.failure

        d.addCallback(lambda r: self._render_response(r))
        return d

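    # Illustrative sketch (the endpoint url is an assumption for a default
    # local setup): blocks can be looked up by hash or by height, but not both.
    #
    #   import requests
    #   block = requests.post('http://localhost:5279/lbryapi', json={
    #       'method': 'block_show', 'params': {'height': 100000}}).json()
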
    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_blob_get(self, blob_hash, timeout=None, encoding=None, payment_rate_manager=None):
        """
        Download and return a blob

        Usage:
            blob_get (<blob_hash> | --blob_hash=<blob_hash>) [--timeout=<timeout>]
                     [--encoding=<encoding>] [--payment_rate_manager=<payment_rate_manager>]

        Options:
            --timeout=<timeout> : timeout in number of seconds
            --encoding=<encoding> : by default no attempt at decoding is made,
                                    can be set to one of the
                                    following decoders:
                                        'json'
            --payment_rate_manager=<payment_rate_manager> : if not given the default payment rate
                                                            manager will be used.
                                                            supported alternative rate managers:
                                                                'only-free'

        Returns:
            (str) Success/Fail message or (dict) decoded data
        """

        decoders = {
            'json': json.loads
        }

        timeout = timeout or 30
        payment_rate_manager = get_blob_payment_rate_manager(self.session, payment_rate_manager)
        blob = yield self._download_blob(blob_hash, rate_manager=payment_rate_manager,
                                         timeout=timeout)
        if encoding and encoding in decoders:
            blob_file = blob.open_for_reading()
            result = decoders[encoding](blob_file.read())
            blob_file.close()
        else:
            result = "Downloaded blob %s" % blob_hash

        response = yield self._render_response(result)
        defer.returnValue(response)

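    # Illustrative sketch (the endpoint url is an assumption for a default
    # local setup): fetching a blob and asking the daemon to decode it as JSON.
    #
    #   import requests
    #   decoded = requests.post('http://localhost:5279/lbryapi', json={
    #       'method': 'blob_get',
    #       'params': {'blob_hash': '<hash>', 'encoding': 'json'}}).json()
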
    @AuthJSONRPCServer.auth_required
    @defer.inlineCallbacks
    def jsonrpc_blob_delete(self, blob_hash):
        """
        Delete a blob

        Usage:
            blob_delete (<blob_hash> | --blob_hash=<blob_hash>)

        Returns:
            (str) Success/fail message
        """

        if blob_hash not in self.session.blob_manager.blobs:
            response = yield self._render_response("Don't have that blob")
            defer.returnValue(response)
        try:
            # if the blob is an sd blob, also delete the stream it describes
            stream_hash = yield self.stream_info_manager.get_stream_hash_for_sd_hash(blob_hash)
            yield self.stream_info_manager.delete_stream(stream_hash)
        except Exception:
            pass
        yield self.session.blob_manager.delete_blobs([blob_hash])
        response = yield self._render_response("Deleted %s" % blob_hash)
        defer.returnValue(response)

    def jsonrpc_peer_list(self, blob_hash, timeout=None):
        """
        Get peers for blob hash

        Usage:
            peer_list (<blob_hash> | --blob_hash=<blob_hash>) [<timeout> | --timeout=<timeout>]

        Options:
            <timeout>, --timeout=<timeout> : peer search timeout in seconds

        Returns:
            (list) List of contacts, each as [host (str), port (int), is_available (bool)]
        """

        timeout = timeout or conf.settings['peer_search_timeout']

        d = self.session.peer_finder.find_peers_for_blob(blob_hash, timeout=timeout)
        d.addCallback(lambda r: [[c.host, c.port, c.is_available()] for c in r])
        d.addCallback(lambda r: self._render_response(r))
        return d

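    # Illustrative sketch (the endpoint url is an assumption for a default
    # local setup): counting reachable peers for a blob; each entry is
    # [host, port, is_available] as built by the callback above.
    #
    #   import requests
    #   peers = requests.post('http://localhost:5279/lbryapi', json={
    #       'method': 'peer_list',
    #       'params': {'blob_hash': '<hash>'}}).json()['result']
    #   reachable = [p for p in peers if p[2]]
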
    @defer.inlineCallbacks
    @AuthJSONRPCServer.flags(announce_all="-a")
    def jsonrpc_blob_announce(self, announce_all=None, blob_hash=None,
                              stream_hash=None, sd_hash=None):
        """
        Announce blobs to the DHT

        Usage:
            blob_announce [-a] [<blob_hash> | --blob_hash=<blob_hash>]
                          [<stream_hash> | --stream_hash=<stream_hash>]
                          [<sd_hash> | --sd_hash=<sd_hash>]

        Options:
            -a : announce all the blobs possessed by user
            <blob_hash>, --blob_hash=<blob_hash> : announce a blob, specified by blob_hash
            <stream_hash>, --stream_hash=<stream_hash> : announce all blobs associated with
                                                         stream_hash
            <sd_hash>, --sd_hash=<sd_hash> : announce all blobs associated with
                                             sd_hash and the sd_hash itself

        Returns:
            (bool) true if successful
        """
        if announce_all:
            yield self.session.blob_manager.immediate_announce_all_blobs()
        else:
            if blob_hash:
                blob_hashes = [blob_hash]
            elif stream_hash:
                blobs = yield self.get_blobs_for_stream_hash(stream_hash)
                blob_hashes = [blob.blob_hash for blob in blobs if blob.get_is_verified()]
            elif sd_hash:
                blobs = yield self.get_blobs_for_sd_hash(sd_hash)
                blob_hashes = [sd_hash] + [blob.blob_hash for blob in blobs if
                                           blob.get_is_verified()]
            else:
                raise Exception('exactly one of blob_hash, stream_hash, or sd_hash '
                                'must be specified')
            yield self.session.blob_manager._immediate_announce(blob_hashes)

        response = yield self._render_response(True)
        defer.returnValue(response)

@AuthJSONRPCServer.deprecated("blob_announce")
|
2017-01-03 20:13:01 +01:00
|
|
|
def jsonrpc_blob_announce_all(self):
|
|
|
|
"""
|
|
|
|
Announce all blobs to the DHT
|
2016-08-11 18:36:13 +02:00
|
|
|
|
2017-05-28 22:01:53 +02:00
|
|
|
Usage:
|
|
|
|
blob_announce_all
|
|
|
|
|
2016-08-11 18:36:13 +02:00
|
|
|
Returns:
|
2017-03-14 00:14:11 +01:00
|
|
|
(str) Success/fail message
|
2016-08-11 18:36:13 +02:00
|
|
|
"""
|
2017-09-20 17:16:08 +02:00
|
|
|
return self.jsonrpc_blob_announce(announce_all=True)
|
2016-08-11 18:38:10 +02:00
|
|
|
|
2017-03-16 20:35:54 +01:00
|
|
|
    @defer.inlineCallbacks
    def jsonrpc_file_reflect(self, **kwargs):
        """
        Reflect all the blobs in a file matching the filter criteria

        Usage:
            file_reflect [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                         [--stream_hash=<stream_hash>] [--claim_id=<claim_id>]
                         [--outpoint=<outpoint>] [--rowid=<rowid>] [--name=<name>]
                         [--reflector=<reflector>]

        Options:
            --sd_hash=<sd_hash> : get file with matching sd hash
            --file_name=<file_name> : get file with matching file name in the
                                      downloads folder
            --stream_hash=<stream_hash> : get file with matching stream hash
            --claim_id=<claim_id> : get file with matching claim id
            --outpoint=<outpoint> : get file with matching claim outpoint
            --rowid=<rowid> : get file with matching row id
            --name=<name> : get file with matching associated name claim
            --reflector=<reflector> : reflector server, ip address or url
                                      by default choose a server from the config

        Returns:
            (list) list of blobs reflected
        """

        reflector_server = kwargs.get('reflector', None)
        lbry_files = yield self._get_lbry_files(**kwargs)

        if len(lbry_files) > 1:
            raise Exception('Too many (%i) files found, need one' % len(lbry_files))
        elif not lbry_files:
            raise Exception('No file found')
        lbry_file = lbry_files[0]

        results = yield reupload.reflect_stream(lbry_file, reflector_server=reflector_server)
        defer.returnValue(results)

    @defer.inlineCallbacks
    @AuthJSONRPCServer.flags(needed="-n", finished="-f")
    def jsonrpc_blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None,
                          finished=None, page_size=None, page=None):
        """
        Returns blob hashes. If not given filters, returns all blobs known by the blob manager

        Usage:
            blob_list [-n] [-f] [<uri> | --uri=<uri>] [<stream_hash> | --stream_hash=<stream_hash>]
                      [<sd_hash> | --sd_hash=<sd_hash>] [<page_size> | --page_size=<page_size>]
                      [<page> | --page=<page>]

        Options:
            -n : only return needed blobs
            -f : only return finished blobs
            <uri>, --uri=<uri> : filter blobs by stream in a uri
            <stream_hash>, --stream_hash=<stream_hash> : filter blobs by stream hash
            <sd_hash>, --sd_hash=<sd_hash> : filter blobs by sd hash
            <page_size>, --page_size=<page_size> : results page size
            <page>, --page=<page> : page of results to return

        Returns:
            (list) List of blob hashes
        """

        if uri:
            metadata = yield self._resolve_name(uri)
            sd_hash = utils.get_sd_hash(metadata)
            blobs = yield self.get_blobs_for_sd_hash(sd_hash)
        elif stream_hash:
            try:
                blobs = yield self.get_blobs_for_stream_hash(stream_hash)
            except NoSuchStreamHash:
                blobs = []
        elif sd_hash:
            try:
                blobs = yield self.get_blobs_for_sd_hash(sd_hash)
            except NoSuchSDHash:
                blobs = []
        else:
            blobs = self.session.blob_manager.blobs.itervalues()

        if needed:
            blobs = [blob for blob in blobs if not blob.get_is_verified()]
        if finished:
            blobs = [blob for blob in blobs if blob.get_is_verified()]

        blob_hashes = [blob.blob_hash for blob in blobs]
        page_size = page_size or len(blob_hashes)
        page = page or 0
        start_index = page * page_size
        stop_index = start_index + page_size
        blob_hashes_for_return = blob_hashes[start_index:stop_index]
        response = yield self._render_response(blob_hashes_for_return)
        defer.returnValue(response)

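    # Illustrative sketch (the endpoint url is an assumption for a default
    # local setup): paging through blob hashes 50 at a time until a page
    # comes back empty.
    #
    #   import requests
    #   page, hashes = 0, []
    #   while True:
    #       batch = requests.post('http://localhost:5279/lbryapi', json={
    #           'method': 'blob_list',
    #           'params': {'page_size': 50, 'page': page}}).json()['result']
    #       if not batch:
    #           break
    #       hashes.extend(batch)
    #       page += 1
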
    def jsonrpc_blob_reflect_all(self):
        """
        Reflects all saved blobs

        Usage:
            blob_reflect_all

        Returns:
            (bool) true if successful
        """

        d = self.session.blob_manager.get_all_verified_blobs()
        d.addCallback(reupload.reflect_blob_hashes, self.session.blob_manager)
        d.addCallback(lambda r: self._render_response(r))
        return d

    def jsonrpc_routing_table_get(self):
        """
        Get DHT routing information

        Usage:
            routing_table_get

        Returns:
            (dict) dictionary containing routing and contact information
            {
                "buckets": {
                    <bucket index>: [
                        {
                            "address": (str) peer address,
                            "node_id": (str) peer node id,
                            "blobs": (list) blob hashes announced by peer
                        }
                    ]
                },
                "contacts": (list) contact node ids,
                "blob_hashes": (list) all of the blob hashes stored by peers in the list of buckets,
                "node_id": (str) the local dht node id
            }
        """

        result = {}
        data_store = deepcopy(self.session.dht_node._dataStore._dict)
        datastore_len = len(data_store)
        hosts = {}

        # map each known announcing contact to the blob hashes it has published
        if datastore_len:
            for k, v in data_store.iteritems():
                for value, lastPublished, originallyPublished, originalPublisherID in v:
                    try:
                        contact = self.session.dht_node._routingTable.getContact(
                            originalPublisherID)
                    except ValueError:
                        continue
                    if contact in hosts:
                        blobs = hosts[contact]
                    else:
                        blobs = []
                    blobs.append(k.encode('hex'))
                    hosts[contact] = blobs

        contact_set = []
        blob_hashes = []
        result['buckets'] = {}

        # walk the routing table buckets, attaching announced blobs to each contact
        for i in range(len(self.session.dht_node._routingTable._buckets)):
            for contact in self.session.dht_node._routingTable._buckets[i]._contacts:
                contacts = result['buckets'].get(i, [])
                if contact in hosts:
                    blobs = hosts[contact]
                    del hosts[contact]
                else:
                    blobs = []
                host = {
                    "address": contact.address,
                    "node_id": contact.id.encode("hex"),
                    "blobs": blobs,
                }
                for blob_hash in blobs:
                    if blob_hash not in blob_hashes:
                        blob_hashes.append(blob_hash)
                contacts.append(host)
                result['buckets'][i] = contacts
                if contact.id.encode('hex') not in contact_set:
                    contact_set.append(contact.id.encode("hex"))

        result['contacts'] = contact_set
        result['blob_hashes'] = blob_hashes
        result['node_id'] = self.session.dht_node.node_id.encode('hex')
        return self._render_response(result)

    @defer.inlineCallbacks
    def jsonrpc_get_availability(self, uri, sd_timeout=None, peer_timeout=None):
        """
        Get stream availability for lbry uri

        Usage:
            get_availability (<uri> | --uri=<uri>) [<sd_timeout> | --sd_timeout=<sd_timeout>]
                             [<peer_timeout> | --peer_timeout=<peer_timeout>]

        Options:
            <sd_timeout>, --sd_timeout=<sd_timeout> : sd blob download timeout
            <peer_timeout>, --peer_timeout=<peer_timeout> : how long to look for peers

        Returns:
            (float) Mean number of peers per blob, computed over a sample of the stream's blobs
        """

        def _get_mean(blob_availabilities):
            peer_counts = []
            for blob_availability in blob_availabilities:
                for blob, peers in blob_availability.iteritems():
                    peer_counts.append(peers)
            if peer_counts:
                return round(1.0 * sum(peer_counts) / len(peer_counts), 2)
            else:
                return 0.0

        def read_sd_blob(sd_blob):
            sd_blob_file = sd_blob.open_for_reading()
            decoded_sd_blob = json.loads(sd_blob_file.read())
            sd_blob_file.close()
            return decoded_sd_blob

        resolved_result = yield self.session.wallet.resolve(uri)
        if resolved_result and uri in resolved_result:
            resolved = resolved_result[uri]
        else:
            defer.returnValue(None)

        if 'claim' in resolved:
            metadata = resolved['claim']['value']
        else:
            defer.returnValue(None)

        sd_hash = utils.get_sd_hash(metadata)
        sd_timeout = sd_timeout or conf.settings['sd_download_timeout']
        peer_timeout = peer_timeout or conf.settings['peer_search_timeout']
        blobs = []
        try:
            blobs = yield self.get_blobs_for_sd_hash(sd_hash)
            need_sd_blob = False
            log.info("Already have sd blob")
        except NoSuchSDHash:
            need_sd_blob = True
            log.info("Need sd blob")

        blob_hashes = [blob.blob_hash for blob in blobs]
        if need_sd_blob:
            # we don't want to use self._download_descriptor here because it would create a stream
            try:
                sd_blob = yield self._download_blob(sd_hash, timeout=sd_timeout)
            except Exception as err:
                response = yield self._render_response(0.0)
                log.warning(err)
                defer.returnValue(response)
            decoded = read_sd_blob(sd_blob)
            blob_hashes = [blob.get("blob_hash") for blob in decoded['blobs']
                           if blob.get("blob_hash")]
        sample = random.sample(blob_hashes, min(len(blob_hashes), 5))
        log.info("check peers for %i of %i blobs in stream", len(sample), len(blob_hashes))
        availabilities = yield self.session.blob_tracker.get_availability_for_blobs(sample,
                                                                                    peer_timeout)
        mean_availability = _get_mean(availabilities)
        response = yield self._render_response(mean_availability)
        defer.returnValue(response)

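    # Illustrative sketch (the endpoint url is an assumption for a default
    # local setup): 0.0 means no peers were found for the sampled blobs, and
    # None means the uri did not resolve to a claim.
    #
    #   import requests
    #   availability = requests.post('http://localhost:5279/lbryapi', json={
    #       'method': 'get_availability',
    #       'params': {'uri': 'lbry://what'}}).json()['result']
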
    @defer.inlineCallbacks
    @AuthJSONRPCServer.flags(a_arg='-a', b_arg='-b')
    def jsonrpc_cli_test_command(self, pos_arg, pos_args=[], pos_arg2=None, pos_arg3=None,
                                 a_arg=False, b_arg=False):
        """
        This command is only for testing the CLI argument parsing

        Usage:
            cli_test_command [-a] [-b] (<pos_arg> | --pos_arg=<pos_arg>)
                             [<pos_args>...] [--pos_arg2=<pos_arg2>]
                             [--pos_arg3=<pos_arg3>]

        Options:
            -a, --a_arg : a arg
            -b, --b_arg : b arg
            <pos_arg2>, --pos_arg2=<pos_arg2> : pos arg 2
            <pos_arg3>, --pos_arg3=<pos_arg3> : pos arg 3

        Returns:
            pos args
        """
        out = (pos_arg, pos_args, pos_arg2, pos_arg3, a_arg, b_arg)
        response = yield self._render_response(out)
        defer.returnValue(response)


def loggly_time_string(dt):
    formatted_dt = dt.strftime("%Y-%m-%dT%H:%M:%S")
    # format the microseconds as a fractional-second suffix; the previous
    # 10 ** -5 scaling produced values greater than one second and garbled
    # the resulting timestamps
    milliseconds = ".%03d" % (dt.microsecond // 1000)
    return urllib.quote_plus(formatted_dt + milliseconds + "Z")

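# Example (with the fractional-second fix above, assuming
# `from datetime import datetime`):
#   loggly_time_string(datetime(2017, 1, 2, 3, 4, 5, 123456))
#   -> '2017-01-02T03%3A04%3A05.123Z'
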
def get_loggly_query_string(installation_id):
    base_loggly_search_url = "https://lbry.loggly.com/search#"
    now = utils.now()
    yesterday = now - utils.timedelta(days=1)
    params = {
        'terms': 'json.installation_id:{}*'.format(installation_id[:SHORT_ID_LEN]),
        'from': loggly_time_string(yesterday),
        'to': loggly_time_string(now)
    }
    data = urllib.urlencode(params)
    return base_loggly_search_url + data

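# Usage sketch: the returned url is meant to be pasted into a browser; it
# searches the last 24 hours of logs for entries whose installation id starts
# with the short id prefix.
#   url = get_loggly_query_string(installation_id)
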
def report_bug_to_slack(message, installation_id, platform_name, app_version):
    webhook = utils.deobfuscate(conf.settings['SLACK_WEBHOOK'])
    payload_template = "os: %s\n version: %s\n<%s|loggly>\n%s"
    payload_params = (
        platform_name,
        app_version,
        get_loggly_query_string(installation_id),
        message
    )
    payload = {
        "text": payload_template % payload_params
    }
    requests.post(webhook, json.dumps(payload))

def get_lbry_file_search_value(search_fields):
    for searchtype in FileID:
        value = search_fields.get(searchtype, None)
        if value is not None:
            return searchtype, value
    raise NoValidSearch('{} is missing a valid search type'.format(search_fields))

def iter_lbry_file_search_values(search_fields):
    for searchtype in FileID:
        value = search_fields.get(searchtype, None)
        if value is not None:
            yield searchtype, value

def get_blob_payment_rate_manager(session, payment_rate_manager=None):
    if payment_rate_manager:
        rate_managers = {
            'only-free': OnlyFreePaymentsManager()
        }
        if payment_rate_manager in rate_managers:
            payment_rate_manager = rate_managers[payment_rate_manager]
            log.info("Downloading blob with rate manager: %s", payment_rate_manager)
    return payment_rate_manager or session.payment_rate_manager
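
# Usage sketch: passing the string 'only-free' swaps in the free-only rate
# manager; any other value falls through to the session's default manager.
#   prm = get_blob_payment_rate_manager(session, 'only-free')
#   prm = get_blob_payment_rate_manager(session)  # session.payment_rate_manager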