import locale
import os
import sys
import simplejson as json
import binascii
import subprocess
import logging
import requests
# import rumps
# import httplib2

from twisted.web import server, resource, static
from twisted.internet import defer, threads, error, reactor
from txjsonrpc import jsonrpclib
from txjsonrpc.web import jsonrpc
from jsonrpc.proxy import JSONRPCProxy

from datetime import datetime
from decimal import Decimal
from StringIO import StringIO
from zipfile import ZipFile
from urllib import urlopen

from appdirs import user_data_dir

from lbrynet.core.PaymentRateManager import PaymentRateManager
from lbrynet.core.server.BlobAvailabilityHandler import BlobAvailabilityHandlerFactory
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.lbrynet_console.ControlHandlers import get_time_behind_blockchain
from lbrynet.core.Error import UnknownNameError
from lbrynet.lbryfile.StreamDescriptor import LBRYFileStreamType
from lbrynet.lbryfile.client.LBRYFileDownloader import LBRYFileSaverFactory, LBRYFileOpenerFactory
from lbrynet.lbryfile.client.LBRYFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.lbrynet_daemon.LBRYDownloader import GetStream, FetcherDaemon
from lbrynet.lbrynet_daemon.LBRYPublisher import Publisher
from lbrynet.core.utils import generate_id
from lbrynet.lbrynet_console.LBRYSettings import LBRYSettings
from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, DEFAULT_MAX_SEARCH_RESULTS, KNOWN_DHT_NODES, DEFAULT_MAX_KEY_FEE
from lbrynet.conf import API_CONNECTION_STRING, API_PORT, API_ADDRESS, DEFAULT_TIMEOUT
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob
from lbrynet.core.Session import LBRYSession
from lbrynet.core.PTCWallet import PTCWallet
from lbrynet.core.LBRYcrdWallet import LBRYcrdWallet, LBRYumWallet
from lbrynet.lbryfilemanager.LBRYFileManager import LBRYFileManager
from lbrynet.lbryfile.LBRYFileMetadataManager import DBLBRYFileMetadataManager, TempLBRYFileMetadataManager


log = logging.getLogger(__name__)

BAD_REQUEST = 400
NOT_FOUND = 404
OK_CODE = 200

# TODO add login credentials in a conf file
# TODO alert if your copy of a lbry file is out of date with the name record


class Bunch:
    def __init__(self, params):
        self.__dict__.update(params)


class LBRYDaemon(jsonrpc.JSONRPC):
    """
    LBRYnet daemon, a jsonrpc interface to lbry functions
    """

    isLeaf = True
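
    # render() handles an incoming JSON-RPC request: it parses the POST body,
    # works out the protocol version, looks up the requested jsonrpc_* method,
    # runs it through a Deferred, and writes the marshalled result (or Fault)
    # back to the client.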
    def render(self, request):
        request.content.seek(0, 0)
        # Unmarshal the JSON-RPC data.
        content = request.content.read()
        parsed = jsonrpclib.loads(content)
        functionPath = parsed.get("method")
        args = parsed.get('params')
        id = parsed.get('id')
        version = parsed.get('jsonrpc')
        if version:
            version = int(float(version))
        elif id and not version:
            version = jsonrpclib.VERSION_1
        else:
            version = jsonrpclib.VERSION_PRE1
        # XXX this all needs to be re-worked to support logic for multiple
        # versions...
        try:
            function = self._getFunction(functionPath)
        except jsonrpclib.Fault as f:
            self._cbRender(f, request, id, version)
        else:
            request.setHeader('Access-Control-Allow-Origin', 'http://localhost:' + str(API_PORT))
            request.setHeader("content-type", "text/json")
            if args == [{}]:
                d = defer.maybeDeferred(function)
            else:
                d = defer.maybeDeferred(function, *args)
            d.addErrback(self._ebRender, id)
            d.addCallback(self._cbRender, request, id, version)
        return server.NOT_DONE_YET
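
    # setup() chains the whole startup sequence through a single Deferred:
    # instance variables, daemon settings, data directory and db migration,
    # stored settings, wallet/session creation, stream identifiers, the lbry
    # file manager, query handlers, the peer server, and the autofetcher.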
    def setup(self, wallet_type, check_for_updates):
        def _set_vars(wallet_type, check_for_updates):
            reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)

            self.fetcher = None
            self.current_db_revision = 1
            self.run_server = True
            self.session = None
            self.known_dht_nodes = KNOWN_DHT_NODES
            if sys.platform != "darwin":
                self.db_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
            else:
                self.db_dir = user_data_dir("LBRY")
                # self.db_dir = os.path.join(os.path.expanduser("~"), "Library/Application Support/lbrynet")
            self.blobfile_dir = os.path.join(self.db_dir, "blobfiles")
            self.peer_port = 3333
            self.dht_node_port = 4444
            self.first_run = False
            if os.name == "nt":
                from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
                self.download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
                self.wallet_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrycrd")
            elif sys.platform == "darwin":
                self.download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
                # self.wallet_dir = os.path.join(os.path.expanduser("~"), "Library/Application Support/lbrycrd")
                self.wallet_dir = user_data_dir("LBRY")
            else:
                self.wallet_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd")
                self.download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
            self.daemon_conf = os.path.join(self.wallet_dir, 'daemon_settings.conf')
            self.wallet_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
            self.wallet_user = None
            self.wallet_password = None
            self.sd_identifier = StreamDescriptorIdentifier()
            self.stream_info_manager = TempLBRYFileMetadataManager()
            self.wallet_rpc_port = 8332
            self.downloads = []
            self.stream_frames = []
            self.default_blob_data_payment_rate = MIN_BLOB_DATA_PAYMENT_RATE
            self.use_upnp = True
            self.start_lbrycrdd = True
            if os.name == "nt":
                self.lbrycrdd_path = "lbrycrdd.exe"
            else:
                self.lbrycrdd_path = "./lbrycrdd"
            self.delete_blobs_on_remove = True
            self.blob_request_payment_rate_manager = None
            self.lbry_file_metadata_manager = None
            self.lbry_file_manager = None
            self.settings = LBRYSettings(self.db_dir)
            self.wallet_type = wallet_type
            self.check_for_updates = check_for_updates
            self.lbrycrd_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
            self.autofetcher_conf = os.path.join(self.wallet_dir, "autofetcher.conf")
            self.created_data_dir = False
            if not os.path.exists(self.db_dir):
                os.mkdir(self.db_dir)
                self.created_data_dir = True
            self.session_settings = None
            self.data_rate = MIN_BLOB_DATA_PAYMENT_RATE
            self.max_key_fee = DEFAULT_MAX_KEY_FEE
            self.max_search_results = DEFAULT_MAX_SEARCH_RESULTS
            self.startup_message = ""
            self.announced_startup = False
            self.search_timeout = 3.0
            self.query_handlers = {}
            self.default_settings = {
                'run_on_startup': False,
                'data_rate': MIN_BLOB_DATA_PAYMENT_RATE,
                'max_key_fee': 10.0,
                'default_download_directory': self.download_directory,
                'max_upload': 0.0,
                'max_download': 0.0
            }

            return defer.succeed(None)

        def _disp_startup():
            log.info("[" + str(datetime.now()) + "] Started lbrynet-daemon")
            return defer.succeed(None)

        log.info("[" + str(datetime.now()) + "] Starting lbrynet-daemon")

        d = defer.Deferred()
        d.addCallback(lambda _: _set_vars(wallet_type, check_for_updates))
        d.addCallback(lambda _: self._setup_daemon_settings())
        d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory))
        d.addCallback(lambda _: self._check_db_migration())
        d.addCallback(lambda _: self._get_settings())
        d.addCallback(lambda _: self._get_lbrycrdd_path())
        d.addCallback(lambda _: self._get_session())
        d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier))
        d.addCallback(lambda _: self._setup_stream_identifier())
        d.addCallback(lambda _: self._setup_lbry_file_manager())
        d.addCallback(lambda _: self._setup_lbry_file_opener())
        d.addCallback(lambda _: self._setup_query_handlers())
        d.addCallback(lambda _: self._setup_server())
        d.addCallback(lambda _: self._setup_fetcher())
        d.addCallback(lambda _: _disp_startup())
        d.callback(None)
        return defer.succeed(None)

    def _initial_setup(self):
        return defer.fail(NotImplementedError())

    def _setup_daemon_settings(self):
        self.session_settings = self.default_settings
        return defer.succeed(None)

    def _start_server(self):
        if self.peer_port is not None:
            server_factory = ServerProtocolFactory(self.session.rate_limiter,
                                                   self.query_handlers,
                                                   self.session.peer_manager)
            try:
                self.lbry_server_port = reactor.listenTCP(self.peer_port, server_factory)
            except error.CannotListenError as e:
                import traceback
                log.error("Couldn't bind to port %d. %s", self.peer_port, traceback.format_exc())
                raise ValueError("%s lbrynet may already be running on your computer." % str(e))
        return defer.succeed(True)

    def _stop_server(self):
        if self.lbry_server_port is not None:
            self.lbry_server_port, p = None, self.lbry_server_port
            return defer.maybeDeferred(p.stopListening)
        else:
            return defer.succeed(True)

    def _setup_server(self):
        def restore_running_status(running):
            if running is True:
                return self._start_server()
            return defer.succeed(True)

        dl = self.settings.get_server_running_status()
        dl.addCallback(restore_running_status)
        return dl

    def _setup_query_handlers(self):
        handlers = [
            # CryptBlobInfoQueryHandlerFactory(self.lbry_file_metadata_manager, self.session.wallet,
            #                                  self._server_payment_rate_manager),
            BlobAvailabilityHandlerFactory(self.session.blob_manager),
            # BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet,
            #                           self._server_payment_rate_manager),
            self.session.wallet.get_wallet_info_query_handler_factory(),
        ]

        def get_blob_request_handler_factory(rate):
            self.blob_request_payment_rate_manager = PaymentRateManager(
                self.session.base_payment_rate_manager, rate
            )
            handlers.append(BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet,
                                                      self.blob_request_payment_rate_manager))

        d1 = self.settings.get_server_data_payment_rate()
        d1.addCallback(get_blob_request_handler_factory)

        dl = defer.DeferredList([d1])
        dl.addCallback(lambda _: self._add_query_handlers(handlers))
        return dl

    def _add_query_handlers(self, query_handlers):
        def _set_query_handlers(statuses):
            from future_builtins import zip
            for handler, (success, status) in zip(query_handlers, statuses):
                if success is True:
                    self.query_handlers[handler] = status

        ds = []
        for handler in query_handlers:
            ds.append(self.settings.get_query_handler_status(handler.get_primary_query_identifier()))
        dl = defer.DeferredList(ds)
        dl.addCallback(_set_query_handlers)
        return dl

    def _shutdown(self):
        log.info("Closing lbrynet session")
        d = self._stop_server()
        if self.session is not None:
            d.addCallback(lambda _: self.session.shut_down())
        return d

    def _update_settings(self, settings):
        if not isinstance(settings['run_on_startup'], bool):
            return defer.fail()
        elif not isinstance(settings['data_rate'], float):
            return defer.fail()
        elif not isinstance(settings['max_key_fee'], float):
            return defer.fail()
        elif not isinstance(settings['default_download_directory'], unicode):
            return defer.fail()
        elif not isinstance(settings['max_upload'], float):
            return defer.fail()
        elif not isinstance(settings['max_download'], float):
            return defer.fail()

        self.session_settings['run_on_startup'] = settings['run_on_startup']
        self.session_settings['data_rate'] = settings['data_rate']
        self.session_settings['max_key_fee'] = settings['max_key_fee']
        self.session_settings['default_download_directory'] = settings['default_download_directory']
        self.session_settings['max_upload'] = settings['max_upload']
        self.session_settings['max_download'] = settings['max_download']
        return defer.succeed(True)

    def _setup_fetcher(self):
        self.fetcher = FetcherDaemon(self.session, self.lbry_file_manager, self.lbry_file_metadata_manager,
                                     self.session.wallet, self.sd_identifier, self.autofetcher_conf)
        return defer.succeed(None)

    def _setup_data_directory(self):
        log.info("Loading databases...")
        if self.created_data_dir:
            db_revision = open(os.path.join(self.db_dir, "db_revision"), mode='w')
            db_revision.write(str(self.current_db_revision))
            db_revision.close()
            log.debug("Created the db revision file: %s", str(os.path.join(self.db_dir, "db_revision")))
        if not os.path.exists(self.blobfile_dir):
            os.mkdir(self.blobfile_dir)
            log.debug("Created the blobfile directory: %s", str(self.blobfile_dir))

    def _check_db_migration(self):
        old_revision = 0
        db_revision_file = os.path.join(self.db_dir, "db_revision")
        if os.path.exists(db_revision_file):
            old_revision = int(open(db_revision_file).read().strip())
        if old_revision < self.current_db_revision:
            from lbrynet.db_migrator import dbmigrator
            log.info("Upgrading your databases...")
            d = threads.deferToThread(dbmigrator.migrate_db, self.db_dir, old_revision, self.current_db_revision)

            def print_success(old_dirs):
                success_string = "Finished upgrading the databases. It is now safe to delete the "
                success_string += "following directories, if you feel like it. It won't make any "
                success_string += "difference.\nAnyway here they are: "
                for i, old_dir in enumerate(old_dirs):
                    success_string += old_dir
                    if i + 1 < len(old_dirs):
                        success_string += ", "
                log.info(success_string)

            d.addCallback(print_success)
            return d
        return defer.succeed(True)

    def _get_settings(self):
        d = self.settings.start()
        d.addCallback(lambda _: self.settings.get_lbryid())
        d.addCallback(self._set_lbryid)
        d.addCallback(lambda _: self._get_lbrycrdd_path())
        return d

    def _set_lbryid(self, lbryid):
        if lbryid is None:
            return self._make_lbryid()
        else:
            self.lbryid = lbryid

    def _make_lbryid(self):
        self.lbryid = generate_id()
        d = self.settings.save_lbryid(self.lbryid)
        return d

    def _setup_lbry_file_manager(self):
        self.lbry_file_metadata_manager = DBLBRYFileMetadataManager(self.db_dir)
        d = self.lbry_file_metadata_manager.setup()

        def set_lbry_file_manager():
            self.lbry_file_manager = LBRYFileManager(self.session,
                                                     self.lbry_file_metadata_manager,
                                                     self.sd_identifier,
                                                     delete_data=True)
            return self.lbry_file_manager.setup()

        d.addCallback(lambda _: set_lbry_file_manager())
        return d
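
    # _get_session() gathers the default data payment rate and a wallet chosen by
    # wallet_type (lbrycrd, lbryum, or ptc) in parallel, builds the LBRYSession from
    # the combined results, then runs the first-run check that requests free credits.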
    def _get_session(self):
        def get_default_data_rate():
            d = self.settings.get_default_data_payment_rate()
            d.addCallback(lambda rate: {"default_data_payment_rate": rate if rate is not None else
                                        MIN_BLOB_DATA_PAYMENT_RATE})
            return d

        def get_wallet():
            if self.wallet_type == "lbrycrd":
                log.info("Using lbrycrd wallet")
                lbrycrdd_path = None
                if self.start_lbrycrdd is True:
                    lbrycrdd_path = self.lbrycrdd_path
                    if not lbrycrdd_path:
                        lbrycrdd_path = self.default_lbrycrdd_path
                d = defer.succeed(LBRYcrdWallet(self.db_dir, wallet_dir=self.wallet_dir, wallet_conf=self.lbrycrd_conf,
                                                lbrycrdd_path=lbrycrdd_path))
            elif self.wallet_type == "lbryum":
                log.info("Using lbryum wallet")
                d = defer.succeed(LBRYumWallet(self.db_dir))
            elif self.wallet_type == "ptc":
                log.info("Using PTC wallet")
                d = defer.succeed(PTCWallet(self.db_dir))
            else:
                d = defer.fail()
            d.addCallback(lambda wallet: {"wallet": wallet})
            return d

        d1 = get_default_data_rate()
        d2 = get_wallet()

        def combine_results(results):
            r = {}
            for success, result in results:
                if success is True:
                    r.update(result)
            return r

        def create_session(results):
            self.session = LBRYSession(results['default_data_payment_rate'], db_dir=self.db_dir, lbryid=self.lbryid,
                                       blob_dir=self.blobfile_dir, dht_node_port=self.dht_node_port,
                                       known_dht_nodes=self.known_dht_nodes, peer_port=self.peer_port,
                                       use_upnp=self.use_upnp, wallet=results['wallet'])

        dl = defer.DeferredList([d1, d2], fireOnOneErrback=True)
        dl.addCallback(combine_results)
        dl.addCallback(create_session)
        dl.addCallback(lambda _: self.session.setup())
        dl.addCallback(lambda _: self._check_first_run())
        dl.addCallback(self._show_first_run_result)
        return dl

    def _check_first_run(self):
        d = self.session.wallet.check_first_run()
        d.addCallback(lambda is_first_run: self._do_first_run() if is_first_run else 0.0)
        return d

    def _do_first_run(self):
        d = self.session.wallet.get_new_address()

        def send_request(url, data):
            r = requests.post(url, json=data)
            if r.status_code == 200:
                return r.json()['credits_sent']
            return 0.0

        def log_error(err):
            log.warning("unable to request free credits. %s", err.getErrorMessage())
            return 0.0

        def request_credits(address):
            url = "http://credreq.lbry.io/requestcredits"
            data = {"address": address}
            d = threads.deferToThread(send_request, url, data)
            d.addErrback(log_error)
            return d

        d.addCallback(request_credits)
        return d

    def _show_first_run_result(self, credits_received):
        if credits_received != 0.0:
            points_string = locale.format_string("%.2f LBC", (round(credits_received, 2),), grouping=True)
            self.startup_message = ("Thank you for testing the alpha version of LBRY! You have been given %s "
                                    "for free because we love you. Please give them a few minutes to show up "
                                    "while you catch up with our blockchain." % points_string)
        else:
            self.startup_message = "Connected to LBRYnet"

    def _get_lbrycrdd_path(self):
        def get_lbrycrdd_path_conf_file():
            lbrycrdd_path_conf_path = os.path.join(os.path.expanduser("~"), ".lbrycrddpath.conf")
            if not os.path.exists(lbrycrdd_path_conf_path):
                return ""
            lbrycrdd_path_conf = open(lbrycrdd_path_conf_path)
            lines = lbrycrdd_path_conf.readlines()
            return lines

        d = threads.deferToThread(get_lbrycrdd_path_conf_file)

        def load_lbrycrdd_path(conf):
            for line in conf:
                if len(line.strip()) and line.strip()[0] != "#":
                    self.lbrycrdd_path = line.strip()

        d.addCallback(load_lbrycrdd_path)
        return d

    def _setup_stream_identifier(self):
        file_saver_factory = LBRYFileSaverFactory(self.session.peer_finder, self.session.rate_limiter,
                                                  self.session.blob_manager, self.stream_info_manager,
                                                  self.session.wallet, self.download_directory)
        self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_saver_factory)
        file_opener_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter,
                                                    self.session.blob_manager, self.stream_info_manager,
                                                    self.session.wallet)
        self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, file_opener_factory)
        return defer.succeed(None)

    def _setup_lbry_file_opener(self):
        downloader_factory = LBRYFileOpenerFactory(self.session.peer_finder, self.session.rate_limiter,
                                                   self.session.blob_manager, self.stream_info_manager,
                                                   self.session.wallet)
        self.sd_identifier.add_stream_downloader_factory(LBRYFileStreamType, downloader_factory)
        return defer.succeed(True)
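
    # _download_name() first checks local history for an already-downloaded copy of
    # the claim; if none is found it resolves the name and starts a new GetStream
    # download, ultimately returning the stream hash and local path.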
    def _download_name(self, name, timeout=DEFAULT_TIMEOUT):
        def _disp_file(f):
            file_path = os.path.join(self.download_directory, f.file_name)
            log.info("[" + str(datetime.now()) + "] Already downloaded: " + str(f.stream_hash) + " --> " + file_path)
            return defer.succeed(f)

        def _get_stream(name):
            def _disp(stream):
                log.info("[" + str(datetime.now()) + "] Start stream: " + stream['stream_hash'])
                return stream

            d = self.session.wallet.get_stream_info_for_name(name)
            stream = GetStream(self.sd_identifier, self.session, self.session.wallet, self.lbry_file_manager,
                               max_key_fee=self.max_key_fee, data_rate=self.data_rate, timeout=timeout)
            d.addCallback(_disp)
            d.addCallback(lambda stream_info: stream.start(stream_info))
            d.addCallback(lambda _: self._path_from_name(name))
            return d

        d = self._check_history(name)
        d.addCallback(lambda lbry_file: _get_stream(name) if not lbry_file else _disp_file(lbry_file))
        d.addCallback(lambda _: self._path_from_name(name))
        d.addErrback(lambda err: defer.fail(NOT_FOUND))

        return d

    def _resolve_name(self, name):
        d = defer.Deferred()
        d.addCallback(lambda _: self.session.wallet.get_stream_info_for_name(name))
        d.addErrback(lambda _: defer.fail(UnknownNameError))
        return d

    def _resolve_name_wc(self, name):
        d = defer.Deferred()
        d.addCallback(lambda _: self.session.wallet.get_stream_info_for_name(name))
        d.addErrback(lambda _: defer.fail(UnknownNameError))
        d.callback(None)
        return d

    def _check_history(self, name):
        def _get_lbry_file(path):
            f = open(path, 'r')
            l = json.loads(f.read())
            f.close()

            file_name = l['stream_name'].decode('hex')

            for lbry_file in self.lbry_file_manager.lbry_files:
                if lbry_file.stream_name == file_name:
                    if sys.platform == "darwin":
                        if os.path.isfile(os.path.join(self.download_directory, lbry_file.stream_name)):
                            return lbry_file
                        else:
                            return False
                    else:
                        return lbry_file
            else:
                return False

        def _check(info):
            stream_hash = info['stream_hash']
            path = os.path.join(self.blobfile_dir, stream_hash)
            if os.path.isfile(path):
                log.info("[" + str(datetime.now()) + "] Search for lbry_file, returning: " + stream_hash)
                return defer.succeed(_get_lbry_file(path))
            else:
                log.info("[" + str(datetime.now()) + "] Search for lbry_file didn't return anything")
                return defer.succeed(False)

        d = self._resolve_name(name)
        d.addCallback(_check)
        d.callback(None)
        return d

    def _delete_lbry_file(self, lbry_file):
        d = self.lbry_file_manager.delete_lbry_file(lbry_file)

        def finish_deletion(lbry_file):
            d = lbry_file.delete_data()
            d.addCallback(lambda _: _delete_stream_data(lbry_file))
            return d

        def _delete_stream_data(lbry_file):
            s_h = lbry_file.stream_hash
            d = self.lbry_file_manager.get_count_for_stream_hash(s_h)
            # TODO: could possibly be a timing issue here
            d.addCallback(lambda c: self.stream_info_manager.delete_stream(s_h) if c == 0 else True)
            d.addCallback(lambda _: os.remove(os.path.join(self.download_directory, lbry_file.file_name)) if
                          os.path.isfile(os.path.join(self.download_directory, lbry_file.file_name)) else defer.succeed(None))
            return d

        d.addCallback(lambda _: finish_deletion(lbry_file))
        return d

    def _path_from_name(self, name):
        d = self._check_history(name)
        d.addCallback(lambda lbry_file: {'stream_hash': lbry_file.stream_hash,
                                         'path': os.path.join(self.download_directory, lbry_file.file_name)}
                      if lbry_file else defer.fail(UnknownNameError))
        return d

    def _path_from_lbry_file(self, lbry_file):
        if lbry_file:
            r = {'stream_hash': lbry_file.stream_hash,
                 'path': os.path.join(self.download_directory, lbry_file.file_name)}
            return defer.succeed(r)
        else:
            return defer.fail(UnknownNameError)
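
    # _get_est_cost() estimates the cost of a name: it downloads the stream descriptor
    # to derive a data cost from the stream size and the current data rate, adds the
    # claim's key fee if one is set, and cancels the lookup if it has not produced a
    # result within search_timeout seconds.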
    def _get_est_cost(self, name):
        def _check_est(d, name):
            if type(d.result) is float:
                log.info("[" + str(datetime.now()) + "] Cost est for lbry://" + name + ": " + str(d.result) + " LBC")
            else:
                log.info("[" + str(datetime.now()) + "] Timeout estimating cost for lbry://" + name + ", using key fee")
                d.cancel()
            return defer.succeed(None)

        def _add_key_fee(data_cost):
            d = self.session.wallet.get_stream_info_for_name(name)
            d.addCallback(lambda info: data_cost + info['key_fee'] if 'key_fee' in info.keys() else data_cost)
            return d

        d = self.session.wallet.get_stream_info_for_name(name)
        d.addCallback(lambda info: download_sd_blob(self.session, info['stream_hash'],
                                                    self.blob_request_payment_rate_manager))
        d.addCallback(self.sd_identifier.get_metadata_for_sd_blob)
        d.addCallback(lambda metadata: metadata.validator.info_to_show())
        d.addCallback(lambda info: int(dict(info)['stream_size']) / 1000000 * self.data_rate)
        d.addCallback(_add_key_fee)
        d.addErrback(lambda _: _add_key_fee(0.0))
        reactor.callLater(self.search_timeout, _check_est, d, name)

        return d

    def _render_response(self, result, code):
        return json.dumps({'result': result, 'code': code})

    # def _log_to_slack(self, msg):
    #     URL = "https://hooks.slack.com/services/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"
    #     h = httplib2.Http()
    #     h.request(URL, 'POST', json.dumps({"text": msg}), headers={'Content-Type': 'application/json'})

    def jsonrpc_is_running(self):
        """
        Returns a startup message when the daemon starts, after which it will return True
        """

        if self.startup_message != "" and self.announced_startup == False:
            self.announced_startup = True
            return self._render_response(self.startup_message, OK_CODE)
        elif self.announced_startup:
            return self._render_response(True, OK_CODE)
        else:
            return self._render_response(False, OK_CODE)

    def jsonrpc_get_settings(self):
        """
        Get LBRY payment settings

        @return: {'data_rate': float, 'max_key_fee': float}
        """

        log.info("[" + str(datetime.now()) + "] Get daemon settings")
        return self._render_response(self.session_settings, OK_CODE)

    def jsonrpc_set_settings(self, p):
        """
        Set LBRY payment settings

        @param settings: {'settings': {'data_rate': float, 'max_key_fee': float}}
        """

        d = self._update_settings(p)

        log.info("[" + str(datetime.now()) + "] Set daemon settings")
        return self._render_response(True, OK_CODE)

    def jsonrpc_start_fetcher(self):
        """
        Start automatically downloading new name claims as they happen

        @return: confirmation message
        """

        self.fetcher.start()
        log.info('[' + str(datetime.now()) + '] Start autofetcher')
        # self._log_to_slack('[' + str(datetime.now()) + '] Start autofetcher')
        return self._render_response("Started autofetching claims", OK_CODE)

    def jsonrpc_stop_fetcher(self):
        """
        Stop automatically downloading new name claims as they happen

        @return: confirmation message
        """

        self.fetcher.stop()
        log.info('[' + str(datetime.now()) + '] Stop autofetcher')
        return self._render_response("Stopped autofetching claims", OK_CODE)

    def jsonrpc_fetcher_status(self):
        """
        Get fetcher status

        @return: True/False
        """

        log.info("[" + str(datetime.now()) + "] Get fetcher status")
        return self._render_response(self.fetcher.check_if_running(), OK_CODE)

    def jsonrpc_get_balance(self):
        """
        Get LBC balance

        @return: balance
        """

        log.info("[" + str(datetime.now()) + "] Get balance")
        return self._render_response(self.session.wallet.wallet_balance, OK_CODE)

    def jsonrpc_stop(self):
        """
        Stop lbrynet-daemon

        @return: shutdown message
        """

        def _disp_shutdown():
            log.info("Shutting down lbrynet daemon")

        d = self._shutdown()
        d.addCallback(lambda _: _disp_shutdown())
        d.addCallback(lambda _: reactor.callLater(1.0, reactor.stop))

        return self._render_response("Shutting down", OK_CODE)

    def jsonrpc_get_lbry_files(self):
        """
        Get LBRY files

        @return: List of managed LBRY files
        """

        r = []
        for f in self.lbry_file_manager.lbry_files:
            if f.key:
                t = {'completed': f.completed, 'file_name': f.file_name, 'key': binascii.b2a_hex(f.key),
                     'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash,
                     'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name,
                     'upload_allowed': f.upload_allowed}
            else:
                t = {'completed': f.completed, 'file_name': f.file_name, 'key': None, 'points_paid': f.points_paid,
                     'stopped': f.stopped, 'stream_hash': f.stream_hash, 'stream_name': f.stream_name,
                     'suggested_file_name': f.suggested_file_name, 'upload_allowed': f.upload_allowed}
            r.append(json.dumps(t))

        log.info("[" + str(datetime.now()) + "] Get LBRY files")
        return self._render_response(r, OK_CODE)

    def jsonrpc_resolve_name(self, p):
        """
        Resolve stream info from a LBRY uri

        @param: {'name': name to look up}
        @return: info for name claim
        """

        params = Bunch(p)

        def _disp(info):
            log.info("[" + str(datetime.now()) + "] Resolved info: " + info['stream_hash'])
            return self._render_response(info, OK_CODE)

        d = self._resolve_name(params.name)
        d.addCallbacks(_disp, lambda _: self._render_response('error', NOT_FOUND))
        d.callback(None)
        return d

    def jsonrpc_get(self, p):
        """
        Download stream from a LBRY uri

        @param: name
        @return: {'stream_hash': hex string, 'path': path of download}
        """

        params = Bunch(p)

        if 'timeout' not in p.keys():
            params.timeout = DEFAULT_TIMEOUT

        if params.name:
            d = self._download_name(params.name, timeout=params.timeout)
            d.addCallbacks(lambda message: self._render_response(message, OK_CODE),
                           lambda err: self._render_response('error', NOT_FOUND))
        else:
            d = self._render_response('error', BAD_REQUEST)

        return d

    def jsonrpc_stop_lbry_file(self, p):
        params = Bunch(p)

        try:
            lbry_file = [f for f in self.lbry_file_manager.lbry_files if f.stream_hash == params.stream_hash][0]
        except IndexError:
            return defer.fail(UnknownNameError)

        if not lbry_file.stopped:
            d = self.lbry_file_manager.toggle_lbry_file_running(lbry_file)
            d.addCallback(lambda _: self._render_response("Stream has been stopped", OK_CODE))
            d.addErrback(lambda err: self._render_response(err.getTraceback(), BAD_REQUEST))
            return d
        else:
            return json.dumps({'result': 'Stream was already stopped'})

    def jsonrpc_start_lbry_file(self, p):
        params = Bunch(p)

        try:
            lbry_file = [f for f in self.lbry_file_manager.lbry_files if f.stream_hash == params.stream_hash][0]
        except IndexError:
            return defer.fail(UnknownNameError)

        if lbry_file.stopped:
            d = self.lbry_file_manager.toggle_lbry_file_running(lbry_file)
            d.callback(None)
            return json.dumps({'result': 'Stream started'})
        else:
            return json.dumps({'result': 'Stream was already running'})
    def jsonrpc_search_nametrie(self, p):
        """
        Search the nametrie for claims beginning with search

        @param {'search': search string}
        @return: List of search results
        """

        params = Bunch(p)

        def _clean(n):
            t = []
            for i in n:
                if i[0]:
                    if i[1][0][0] and i[1][1][0] and i[1][2][0]:
                        i[1][0][1]['value'] = str(i[1][0][1]['value'])
                        t.append([i[1][0][1], i[1][1][1], i[1][2][1]])
            return t

        def resolve_claims(claims):
            ds = []
            for claim in claims:
                d1 = defer.succeed(claim)
                d2 = self._resolve_name_wc(claim['name'])
                d3 = self._get_est_cost(claim['name'])
                dl = defer.DeferredList([d1, d2, d3], consumeErrors=True)
                ds.append(dl)
            return defer.DeferredList(ds)

        def _disp(results):
            log.info('[' + str(datetime.now()) + '] Found ' + str(len(results)) + ' search results')
            consolidated_results = []
            for r in results:
                t = {}
                t.update(r[0])
                if 'name' in r[1].keys():
                    r[1]['stream_name'] = r[1]['name']
                    del r[1]['name']
                t.update(r[1])
                t['cost_est'] = r[2]
                consolidated_results.append(t)
                # log.info(str(t))
            return self._render_response(consolidated_results, OK_CODE)

        log.info('[' + str(datetime.now()) + '] Search nametrie: ' + params.search)

        d = self.session.wallet.get_nametrie()
        d.addCallback(lambda trie: [claim for claim in trie if claim['name'].startswith(params.search) and 'txid' in claim])
        d.addCallback(lambda claims: claims[:self.max_search_results])
        d.addCallback(resolve_claims)
        d.addCallback(_clean)
        d.addCallback(_disp)
        return d

    def jsonrpc_delete_lbry_file(self, p):
        """
        Delete a lbry file

        @param {'file_name': string}
        @return: confirmation message
        """

        params = Bunch(p)

        def _disp(file_name):
            log.info("[" + str(datetime.now()) + "] Deleted: " + file_name)
            return self._render_response("Deleted: " + file_name, OK_CODE)

        lbry_files = [self._delete_lbry_file(f) for f in self.lbry_file_manager.lbry_files if params.file_name == f.file_name]
        d = defer.DeferredList(lbry_files)
        d.addCallback(lambda _: _disp(params.file_name))
        return d

    def jsonrpc_publish(self, p):
        """
        Make a new name claim

        @param:
        @return:
        """

        params = Bunch(p)

        metadata_fields = {"name": unicode, "file_path": unicode, "bid": float, "author": unicode, "title": unicode,
                           "description": unicode, "thumbnail": unicode, "key_fee": float, "key_fee_address": unicode,
                           "content_license": unicode, "sources": dict}

        for k in metadata_fields.keys():
            if k in params.__dict__.keys():
                if isinstance(params.__dict__[k], metadata_fields[k]):
                    if type(params.__dict__[k]) == unicode:
                        metadata_fields[k] = str(params.__dict__[k])
                    else:
                        metadata_fields[k] = params.__dict__[k]
                else:
                    metadata_fields[k] = None
            else:
                metadata_fields[k] = None

        log.info("[" + str(datetime.now()) + "] Publish: %s", metadata_fields)

        p = Publisher(self.session, self.lbry_file_manager, self.session.wallet)
        d = p.start(name=metadata_fields['name'],
                    file_path=metadata_fields['file_path'],
                    bid=metadata_fields['bid'],
                    title=metadata_fields['title'],
                    description=metadata_fields['description'],
                    thumbnail=metadata_fields['thumbnail'],
                    key_fee=metadata_fields['key_fee'],
                    key_fee_address=metadata_fields['key_fee_address'],
                    content_license=metadata_fields['content_license'],
                    author=metadata_fields['author'],
                    sources=metadata_fields['sources'])

        d.addCallbacks(lambda msg: self._render_response(msg, OK_CODE),
                       lambda err: self._render_response(err.getTraceback(), BAD_REQUEST))

        return d

    def jsonrpc_abandon_name(self, p):
        """
        Abandon and reclaim credits from a name claim

        @param: {'txid': string}
        @return: txid
        """

        params = Bunch(p)

        def _disp(txid, tx):
            log.info("[" + str(datetime.now()) + "] Abandoned name claim tx " + txid)
            return self._render_response(txid, OK_CODE)

        d = defer.Deferred()
        d.addCallback(lambda _: self.session.wallet.abandon_name(params.txid))
        d.addCallback(lambda tx: _disp(params.txid, tx))
        d.addErrback(lambda err: self._render_response(err.getTraceback(), BAD_REQUEST))
        d.callback(None)
        return d

    def jsonrpc_get_name_claims(self):
        """
        Get name claims

        @return: list of name claims
        """

        def _clean(claims):
            for c in claims:
                for k in c.keys():
                    if isinstance(c[k], Decimal):
                        c[k] = float(c[k])
            return self._render_response(claims, OK_CODE)

        d = self.session.wallet.get_name_claims()
        d.addCallback(_clean)
        return d

    def jsonrpc_get_time_behind_blockchain(self):
        """
        Get time behind blockchain

        @return: time behind blockchain
        """

        d = self.session.wallet.get_most_recent_blocktime()
        d.addCallback(get_time_behind_blockchain)
        d.addCallbacks(lambda result: self._render_response(result, OK_CODE),
                       lambda result: self._render_response(result, BAD_REQUEST))
        return d

    def jsonrpc_get_new_address(self):
        """
        Generate a new wallet address

        @return: new wallet address
        """

        def _disp(address):
            log.info("[" + str(datetime.now()) + "] Got new wallet address: " + address)
            return self._render_response(address, OK_CODE)

        d = self.session.wallet.get_new_address()
        d.addCallback(_disp)
        return d

    # def jsonrpc_update_name(self, metadata):
    #     def _disp(x):
    #         print x
    #         return x
    #
    #     metadata = json.loads(metadata)
    #
    #     required = ['name', 'file_path', 'bid']
    #
    #     for r in required:
    #         if not r in metadata.keys():
    #             return defer.fail()
    #
    #     d = defer.Deferred()
    #     d.addCallback(lambda _: self.session.wallet.update_name(metadata))
    #     d.addCallback(_disp)
    #     d.callback(None)
    #
    #     return d

    def jsonrpc_toggle_fetcher_verbose(self):
        if self.fetcher.verbose:
            self.fetcher.verbose = False
        else:
            self.fetcher.verbose = True

        return self._render_response(self.fetcher.verbose, OK_CODE)
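
    # jsonrpc_check_for_new_version() compares the locally recorded version of each
    # tracked package (lbrynet, lbryum, and the LBRY app) against the HEAD commit of
    # its git remote and reports whether each one is up to date.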
    def jsonrpc_check_for_new_version(self):
        def _check_for_updates(package):
            git_version = subprocess.check_output("git ls-remote " + package['git'] + " | grep HEAD | cut -f 1", shell=True)
            up_to_date = False
            if os.path.isfile(package['version_file']):
                f = open(package['version_file'], 'r')
                current_version = f.read()
                f.close()

                if git_version == current_version:
                    r = package['name'] + " is up to date"
                    up_to_date = True
                else:
                    r = package['name'] + " version is out of date"
            else:
                r = "Unknown version of " + package['name']
            return (up_to_date, r)

        package_infos = {
            "lbrynet": {"name": "LBRYnet",
                        "git": "https://github.com/lbryio/lbry.git",
                        "version_file": os.path.join(self.db_dir, ".lbrynet_version"),
                        "clone": ".lbrygit",
                        },
            "lbryum": {"name": "lbryum",
                       "git": "https://github.com/lbryio/lbryum.git",
                       "version_file": os.path.join(self.db_dir, ".lbryum_version"),
                       "clone": ".lbryumgit",
                       },
            "lbry": {"name": "LBRY",
                     "git": "https://github.com/jackrobison/lbrynet-app.git",
                     "version_file": os.path.join(self.db_dir, ".lbry_app_version"),
                     "clone": None,
                     },
        }

        r = [_check_for_updates(package_infos[p]) for p in package_infos.keys()]
        log.info("[" + str(datetime.now()) + "] Check for new version: " + json.dumps(r))
        return self._render_response(r, OK_CODE)

    def jsonrpc___dir__(self):
        return ['is_running', 'get_settings', 'set_settings', 'start_fetcher', 'stop_fetcher', 'fetcher_status',
                'get_balance', 'stop', 'get_lbry_files', 'resolve_name', 'get', 'search_nametrie',
                'delete_lbry_file', 'check', 'publish', 'abandon_name', 'get_name_claims',
                'get_time_behind_blockchain', 'get_new_address', 'toggle_fetcher_verbose', 'check_for_new_version']
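
# LBRYDaemonCommandHandler is a small client-side helper: it proxies a single named
# command (with optional params) to the daemon's JSON-RPC endpoint at
# API_CONNECTION_STRING.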
class LBRYDaemonCommandHandler(object):
    def __init__(self, command):
        self._api = jsonrpc.Proxy(API_CONNECTION_STRING)
        self.command = command

    def run(self, params=None):
        if params:
            d = self._api.callRemote(self.command, params)
        else:
            d = self._api.callRemote(self.command)
        return d

class LBRYindex(resource.Resource):
    def __init__(self, ui_dir):
        resource.Resource.__init__(self)
        self.ui_dir = ui_dir

    isLeaf = False

    def _delayed_render(self, request, results):
        request.write(str(results))
        request.finish()

    def getChild(self, name, request):
        if name == '':
            return self
        return resource.Resource.getChild(self, name, request)

    def render_GET(self, request):
        def _disp(r):
            log.info(r)
            return "<html><table style='width:100%'>" + ''.join(r) + "</html>"

        return static.File(os.path.join(self.ui_dir, "index.html")).render_GET(request)
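
# LBRYFileRender serves a bare HTML5 video page for a named stream: it asks the
# daemon's "get" method for the local path of the stream and embeds that path in a
# <video> tag, or renders "Error" if the call fails.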
class LBRYFileRender(resource.Resource):
    isLeaf = False

    def _render_path(self, path):
        return r'<html><center><video src="' + path + r'" controls autoplay width="960" height="720"></center></html>'

    def _delayed_render(self, request, results):
        request.write(str(results))
        request.finish()

    def render_GET(self, request):
        if 'name' in request.args.keys():
            api = jsonrpc.Proxy(API_CONNECTION_STRING)
            d = api.callRemote("get", {'name': request.args['name'][0]})
            d.addCallback(lambda response: self._delayed_render(request, self._render_path(json.loads(response)['result']['path']))
                          if json.loads(response)['code'] == 200
                          else self._delayed_render(request, "Error"))
            return server.NOT_DONE_YET
        else:
            self._delayed_render(request, "Error")
            return server.NOT_DONE_YET


# class LBRYFilePage(resource.Resource):
#     isLeaf = False
#
#     def _delayed_render(self, request, results):
#         request.write(str(results))
#         request.finish()
#
#     h = "<tr><td><a href=/webapi?function=delete_lbry_file&file_name=%s>%s</a></td></tr>"
#
#     d = LBRYDaemonCommandHandler('get_lbry_files').run()
#     d.addCallback(lambda r: json.loads(r)['result'])
#     d.addCallback(lambda lbry_files: [h % (json.loads(lbry_file)['file_name'], json.loads(lbry_file)['file_name']) for lbry_file in lbry_files])
#     d.addCallback(lambda r: "<html><table style='width:100%'>" + ''.join(r) + "</html>")
#     d.addCallbacks(lambda results: self._delayed_render(request, results),
#                    lambda err: self._delayed_render(request, err.getTraceback()))
#
#     return server.NOT_DONE_YET
class LBRYDaemonWeb(resource.Resource):
    isLeaf = False

    def _delayed_render(self, request, results):
        request.write(str(results))
        request.setResponseCode(json.loads(results)['code'])
        request.finish()

    def render_GET(self, request):
        func = request.args['function'][0]
        del request.args['function']

        p = {}
        for k in request.args.keys():
            p[k] = request.args[k][0]

        d = LBRYDaemonCommandHandler(func).run(p)
        d.addCallbacks(lambda results: self._delayed_render(request, results),
                       lambda err: self._delayed_render(request, json.dumps({'message': err.getTraceback(), 'code': BAD_REQUEST})))

        return server.NOT_DONE_YET