import sys
from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHandler, ILBRYWallet
from lbrynet.core.client.ClientRequest import ClientRequest
from lbrynet.core.Error import UnknownNameError, InvalidStreamInfoError, RequestCanceledError
from lbrynet.core.Error import InsufficientFundsError
from lbrynet.core.sqlite_helpers import rerun_if_locked
from lbrynet.conf import BASE_METADATA_FIELDS, SOURCE_TYPES, OPTIONAL_METADATA_FIELDS
from lbryum import SimpleConfig, Network
from lbryum.lbrycrd import COIN, TYPE_ADDRESS
from lbryum.wallet import WalletStorage, Wallet
from lbryum.commands import known_commands, Commands
from lbryum.transaction import Transaction
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from twisted.internet import threads, reactor, defer, task
from twisted.python.failure import Failure
from twisted.enterprise import adbapi
from collections import defaultdict, deque
from zope.interface import implements
from decimal import Decimal
import datetime
import logging
import json
import subprocess
import socket
import time
import os

log = logging.getLogger(__name__)
alert = logging.getLogger("lbryalert." + __name__)


class ReservedPoints(object):
    def __init__(self, identifier, amount):
        self.identifier = identifier
        self.amount = amount


def _catch_connection_error(f):
    def w(*args):
        try:
            return f(*args)
        except socket.error:
            raise ValueError("Unable to connect to an lbrycrd server. Make sure an lbrycrd server " +
                             "is running and that this application can connect to it.")
    return w


class LBRYWallet(object):
    """This class implements the LBRYWallet interface for the LBRYcrd payment system"""
    implements(ILBRYWallet)

    _FIRST_RUN_UNKNOWN = 0
    _FIRST_RUN_YES = 1
    _FIRST_RUN_NO = 2

    def __init__(self, db_dir):
        self.db_dir = db_dir
        self.db = None
        self.next_manage_call = None
        self.wallet_balance = Decimal(0.0)
        self.total_reserved_points = Decimal(0.0)
        self.peer_addresses = {}  # {Peer: string}
        self.queued_payments = defaultdict(Decimal)  # {address(string): amount(Decimal)}
        self.expected_balances = defaultdict(Decimal)  # {address(string): amount(Decimal)}
        self.current_address_given_to_peer = {}  # {Peer: address(string)}
        self.expected_balance_at_time = deque()  # (Peer, address(string), amount(Decimal), time(datetime),
                                                 #  count(int), incremental_amount(float))
        self.max_expected_payment_time = datetime.timedelta(minutes=3)
        self.stopped = True

        self.is_lagging = None

        self.manage_running = False
        self._manage_count = 0
        self._balance_refresh_time = 3
        self._batch_count = 20
        self._first_run = self._FIRST_RUN_UNKNOWN

    def start(self):

        def start_manage():
            self.stopped = False
            self.manage()
            return True

        d = self._open_db()
        d.addCallback(lambda _: self._start())
        d.addCallback(lambda _: start_manage())
        return d

    @staticmethod
    def log_stop_error(err):
        log.error("An error occurred stopping the wallet: %s", err.getTraceback())

    def stop(self):
        self.stopped = True
        # If self.next_manage_call is None, then manage is currently running or else
        # start has not been called, so set stopped and do nothing else.
        if self.next_manage_call is not None:
            self.next_manage_call.cancel()
            self.next_manage_call = None

        d = self.manage(do_full=True)
        d.addErrback(self.log_stop_error)
        d.addCallback(lambda _: self._stop())
        d.addErrback(self.log_stop_error)
        return d

    def manage(self, do_full=False):
        self.next_manage_call = None
        have_set_manage_running = [False]
        self._manage_count += 1
        if self._manage_count % self._batch_count == 0:
            self._manage_count = 0
            do_full = True

        def check_if_manage_running():

            d = defer.Deferred()

            def fire_if_not_running():
                if self.manage_running is False:
                    self.manage_running = True
                    have_set_manage_running[0] = True
                    d.callback(True)
                elif do_full is False:
                    d.callback(False)
                else:
                    task.deferLater(reactor, 1, fire_if_not_running)

            fire_if_not_running()
            return d

        d = check_if_manage_running()

        def do_manage():
            if do_full:
                d = self._check_expected_balances()
                d.addCallback(lambda _: self._send_payments())
            else:
                d = defer.succeed(True)

            d.addCallback(lambda _: self.get_balance())

            def set_wallet_balance(balance):
                if self.wallet_balance != balance:
                    log.info("Got a new balance: %s", str(balance))
                self.wallet_balance = balance

            d.addCallback(set_wallet_balance)
            return d

        d.addCallback(lambda should_run: do_manage() if should_run else None)

        def set_next_manage_call():
            if not self.stopped:
                self.next_manage_call = reactor.callLater(self._balance_refresh_time, self.manage)

        d.addCallback(lambda _: set_next_manage_call())

        def log_error(err):
            log.error("Something went wrong during manage. Error message: %s", err.getErrorMessage())
            return err

        d.addErrback(log_error)

        def set_manage_not_running(arg):
            if have_set_manage_running[0] is True:
                self.manage_running = False
            return arg

        d.addBoth(set_manage_not_running)
        return d
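
    # The manage cycle: manage() reschedules itself every `_balance_refresh_time` seconds via
    # reactor.callLater and normally just refreshes the cached balance. Every `_batch_count`-th
    # call (and once more from stop(), via do_full=True) it performs a full pass that also
    # checks expected peer balances and flushes any queued payments.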

    def get_info_exchanger(self):
        return LBRYcrdAddressRequester(self)

    def get_wallet_info_query_handler_factory(self):
        return LBRYcrdAddressQueryHandlerFactory(self)

    def reserve_points(self, identifier, amount):
        """
        Ensure a certain amount of points are available to be sent as payment, before the service is rendered

        @param identifier: The peer to which the payment will ultimately be sent

        @param amount: The amount of points to reserve

        @return: A ReservedPoints object which is given to send_points once the service has been rendered
        """
        rounded_amount = Decimal(str(round(amount, 8)))
        #if peer in self.peer_addresses:
        if self.wallet_balance >= self.total_reserved_points + rounded_amount:
            self.total_reserved_points += rounded_amount
            return ReservedPoints(identifier, rounded_amount)
        return None

    def cancel_point_reservation(self, reserved_points):
        """
        Return all of the points that were reserved previously for some ReservedPoints object

        @param reserved_points: ReservedPoints previously returned by reserve_points

        @return: None
        """
        self.total_reserved_points -= reserved_points.amount

    def send_points(self, reserved_points, amount):
        """
        Schedule a payment to be sent to a peer

        @param reserved_points: ReservedPoints object previously returned by reserve_points

        @param amount: amount of points to actually send, must be less than or equal to the
            amount reserved in reserved_points

        @return: Deferred which fires when the payment has been scheduled
        """
        rounded_amount = Decimal(str(round(amount, 8)))
        peer = reserved_points.identifier
        assert(rounded_amount <= reserved_points.amount)
        assert(peer in self.peer_addresses)
        self.queued_payments[self.peer_addresses[peer]] += rounded_amount
        # make any unused points available
        self.total_reserved_points -= (reserved_points.amount - rounded_amount)
        log.info("Ordering that %s points be sent to %s", str(rounded_amount),
                 str(self.peer_addresses[peer]))
        peer.update_stats('points_sent', amount)
        return defer.succeed(True)

    def send_points_to_address(self, reserved_points, amount):
        """
        Schedule a payment to be sent to an address

        @param reserved_points: ReservedPoints object previously returned by reserve_points

        @param amount: amount of points to actually send, must be less than or equal to the
            amount reserved in reserved_points

        @return: Deferred which fires when the payment has been scheduled
        """
        rounded_amount = Decimal(str(round(amount, 8)))
        address = reserved_points.identifier
        assert(rounded_amount <= reserved_points.amount)
        self.queued_payments[address] += rounded_amount
        self.total_reserved_points -= (reserved_points.amount - rounded_amount)
        log.info("Ordering that %s points be sent to %s", str(rounded_amount),
                 str(address))
        return defer.succeed(True)
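
    # A minimal usage sketch of the reservation flow (illustrative only, assuming a hypothetical
    # `wallet` instance and `peer` object):
    #
    #   reserved = wallet.reserve_points(peer, 1.0)   # returns None if the balance is too low
    #   if reserved is not None:
    #       # ... render the service, then pay some or all of the reservation ...
    #       wallet.send_points(reserved, 0.5)         # the unused 0.5 points become available again
    #   # if the service is never rendered, release the reservation instead:
    #   #     wallet.cancel_point_reservation(reserved)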

    def add_expected_payment(self, peer, amount):
        """Increase the number of points expected to be paid by a peer"""
        rounded_amount = Decimal(str(round(amount, 8)))
        assert(peer in self.current_address_given_to_peer)
        address = self.current_address_given_to_peer[peer]
        log.info("expecting a payment at address %s in the amount of %s", str(address), str(rounded_amount))
        self.expected_balances[address] += rounded_amount
        expected_balance = self.expected_balances[address]
        expected_time = datetime.datetime.now() + self.max_expected_payment_time
        self.expected_balance_at_time.append((peer, address, expected_balance, expected_time, 0, amount))
        peer.update_stats('expected_points', amount)

    def update_peer_address(self, peer, address):
        self.peer_addresses[peer] = address

    def get_new_address_for_peer(self, peer):
        def set_address_for_peer(address):
            self.current_address_given_to_peer[peer] = address
            return address

        d = self.get_new_address()
        d.addCallback(set_address_for_peer)
        return d

    def _send_payments(self):
        payments_to_send = {}
        for address, points in self.queued_payments.items():
            log.info("Should be sending %s points to %s", str(points), str(address))
            payments_to_send[address] = points
            self.total_reserved_points -= points
            self.wallet_balance -= points
            del self.queued_payments[address]
        if payments_to_send:
            log.info("Creating a transaction with outputs %s", str(payments_to_send))
            d = self._do_send_many(payments_to_send)
            d.addCallback(lambda txid: log.debug("Sent transaction %s", txid))
            return d
        log.info("There were no payments to send")
        return defer.succeed(True)

    def get_stream_info_for_name(self, name):
        d = self._get_value_for_name(name)
        d.addCallback(self._get_stream_info_from_value, name)
        return d

    def get_txid_for_name(self, name):
        d = self._get_value_for_name(name)
        d.addCallback(lambda r: None if 'txid' not in r else r['txid'])
        return d

    def get_stream_info_from_txid(self, name, txid):
        d = self.get_claims_from_tx(txid)

        def get_claim_for_name(claims):
            for claim in claims:
                if claim['name'] == name:
                    claim['txid'] = txid
                    return claim
            return Failure(UnknownNameError(name))

        d.addCallback(get_claim_for_name)
        d.addCallback(self._get_stream_info_from_value, name)
        return d

    def _get_stream_info_from_value(self, result, name):
        r_dict = {}
        if 'value' in result:
            value = result['value']

            try:
                value_dict = json.loads(value)
            except (ValueError, TypeError):
                return Failure(InvalidStreamInfoError(name))

            r_dict['sources'] = value_dict['sources']
            for field in BASE_METADATA_FIELDS:
                r_dict[field] = value_dict[field]
            for field in value_dict:
                if field in OPTIONAL_METADATA_FIELDS:
                    r_dict[field] = value_dict[field]

            if 'txid' in result:
                d = self._save_name_metadata(name, r_dict['sources']['lbry_sd_hash'], str(result['txid']))
                d.addCallback(lambda _: r_dict)
                return d
        elif 'error' in result:
            log.warning("Got an error looking up a name: %s", result['error'])
            return Failure(UnknownNameError(name))
        else:
            log.warning("Got an error looking up a name: %s", json.dumps(result))
            return Failure(UnknownNameError(name))

    def claim_name(self, name, bid, sources, metadata, fee=None):
        value = {'sources': {}}
        for k in SOURCE_TYPES:
            if k in sources:
                value['sources'][k] = sources[k]
        if value['sources'] == {}:
            return defer.fail(Failure(ValueError("No source given")))
        for k in BASE_METADATA_FIELDS:
            if k not in metadata:
                return defer.fail(Failure(ValueError("Missing required field '%s'" % k)))
            value[k] = metadata[k]
        for k in metadata:
            if k not in BASE_METADATA_FIELDS:
                value[k] = metadata[k]
        if fee is not None:
            if "LBC" in fee:
                value['fee'] = {'LBC': {'amount': fee['LBC']['amount'], 'address': fee['LBC']['address']}}

        d = self._send_name_claim(name, json.dumps(value), bid)

        def _save_metadata(txid):
            d = self._save_name_metadata(name, value['sources']['lbry_sd_hash'], txid)
            d.addCallback(lambda _: txid)
            return d

        d.addCallback(_save_metadata)
        return d
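
    # Shape of the claim value assembled above (illustrative sketch; the required and optional
    # metadata keys come from BASE_METADATA_FIELDS / OPTIONAL_METADATA_FIELDS in lbrynet.conf,
    # and every value below is a placeholder):
    #
    #   {
    #       "sources": {"lbry_sd_hash": "<sd hash>"},     # at least one SOURCE_TYPES entry required
    #       "<required metadata field>": "...",
    #       "fee": {"LBC": {"amount": 1.0, "address": "<LBC address>"}}   # only if a fee was given
    #   }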

    def abandon_name(self, txid):
        d1 = self.get_new_address()
        d2 = self.get_claims_from_tx(txid)

        def get_txout_of_claim(claims):
            for claim in claims:
                if 'name' in claim and 'nOut' in claim:
                    return claim['nOut']
            return defer.fail(ValueError("No claims in tx"))

        def get_value_of_txout(nOut):
            d = self._get_raw_tx(txid)
            d.addCallback(self._get_decoded_tx)
            d.addCallback(lambda tx: tx['vout'][nOut]['value'])
            return d

        d2.addCallback(get_txout_of_claim)
        d2.addCallback(get_value_of_txout)
        dl = defer.DeferredList([d1, d2], consumeErrors=True)

        def abandon(results):
            if results[0][0] and results[1][0]:
                address = results[0][1]
                amount = float(results[1][1])
                return self._send_abandon(txid, address, amount)
            elif results[0][0] is False:
                return defer.fail(Failure(ValueError("Couldn't get a new address")))
            else:
                return results[1][1]

        dl.addCallback(abandon)
        return dl

    def get_tx(self, txid):
        d = self._get_raw_tx(txid)
        d.addCallback(self._get_decoded_tx)
        return d

    def update_name(self, name, value, amount):
        d = self._get_value_for_name(name)

        def _update_and_save_metadata(claim):
            old_txid = claim['txid']
            update_d = self._update_name(old_txid, json.dumps(value), amount)
            update_d.addCallback(
                lambda new_txid: self._update_name_metadata(name, value['sources']['lbry_sd_hash'],
                                                            old_txid, new_txid))
            return update_d

        d.addCallback(_update_and_save_metadata)
        return d

    def get_name_and_validity_for_sd_hash(self, sd_hash):
        d = self._get_claim_metadata_for_sd_hash(sd_hash)
        d.addCallback(lambda name_txid: self._get_status_of_claim(name_txid[1], name_txid[0], sd_hash)
                      if name_txid is not None else None)
        return d

    def get_available_balance(self):
        return float(self.wallet_balance - self.total_reserved_points)

    def is_first_run(self):
        if self._first_run == self._FIRST_RUN_UNKNOWN:
            d = self._check_first_run()

            def set_first_run(is_first):
                self._first_run = self._FIRST_RUN_YES if is_first else self._FIRST_RUN_NO

            d.addCallback(set_first_run)
        else:
            d = defer.succeed(None)

        d.addCallback(lambda _: self._first_run == self._FIRST_RUN_YES)
        return d

    def _get_status_of_claim(self, txid, name, sd_hash):
        d = self.get_claims_from_tx(txid)

        def get_status(claims):
            if claims is None:
                claims = []
            for claim in claims:
                if 'in claim trie' in claim:
                    if 'name' in claim and str(claim['name']) == name and 'value' in claim:
                        try:
                            value_dict = json.loads(claim['value'])
                        except (ValueError, TypeError):
                            return None
                        claim_sd_hash = None
                        if 'stream_hash' in value_dict:
                            claim_sd_hash = str(value_dict['stream_hash'])
                        if 'sources' in value_dict and 'lbry_sd_hash' in value_dict['sources']:
                            claim_sd_hash = str(value_dict['sources']['lbry_sd_hash'])
                        if claim_sd_hash is not None and claim_sd_hash == sd_hash:
                            if 'is controlling' in claim and claim['is controlling']:
                                return name, "valid"
                            if claim['in claim trie']:
                                return name, "invalid"
                            if 'in queue' in claim and claim['in queue']:
                                return name, "pending"
                            return name, "unconfirmed"
            return None

        d.addCallback(get_status)
        return d

    def _check_expected_balances(self):
        now = datetime.datetime.now()
        balances_to_check = []
        try:
            while self.expected_balance_at_time[0][3] < now:
                balances_to_check.append(self.expected_balance_at_time.popleft())
        except IndexError:
            pass
        ds = []
        for balance_to_check in balances_to_check:
            log.info("Checking balance of address %s", str(balance_to_check[1]))
            d = self._get_balance_for_address(balance_to_check[1])
            # bind the current expected amount to the lambda; otherwise every callback would see
            # the loop's final balance_to_check
            d.addCallback(lambda bal, expected=balance_to_check[2]: bal >= expected)
            ds.append(d)
        dl = defer.DeferredList(ds)

        def handle_checks(results):
            from future_builtins import zip
            for balance, (success, result) in zip(balances_to_check, results):
                peer = balance[0]
                if success is True:
                    if result is False:
                        if balance[4] <= 1:  # first or second strike, give them another chance
                            new_expected_balance = (balance[0],
                                                    balance[1],
                                                    balance[2],
                                                    datetime.datetime.now() + self.max_expected_payment_time,
                                                    balance[4] + 1,
                                                    balance[5])
                            self.expected_balance_at_time.append(new_expected_balance)
                            peer.update_score(-5.0)
                        else:
                            peer.update_score(-50.0)
                    else:
                        if balance[4] == 0:
                            peer.update_score(balance[5])
                        peer.update_stats('points_received', balance[5])
                else:
                    log.warning("Something went wrong checking a balance. Peer: %s, account: %s, "
                                "expected balance: %s, expected time: %s, count: %s, error: %s",
                                str(balance[0]), str(balance[1]), str(balance[2]), str(balance[3]),
                                str(balance[4]), str(result.getErrorMessage()))

        dl.addCallback(handle_checks)
        return dl

    def _open_db(self):
        self.db = adbapi.ConnectionPool('sqlite3', os.path.join(self.db_dir, "blockchainname.db"),
                                        check_same_thread=False)
        return self.db.runQuery("create table if not exists name_metadata (" +
                                "    name text, " +
                                "    txid text, " +
                                "    sd_hash text)")

    def _save_name_metadata(self, name, sd_hash, txid):
        d = self.db.runQuery("select * from name_metadata where txid=?", (txid,))
        d.addCallback(lambda r: self.db.runQuery("insert into name_metadata values (?, ?, ?)",
                                                 (name, txid, sd_hash)) if not len(r) else None)
        return d

    def _update_name_metadata(self, name, sd_hash, old_txid, new_txid):
        d = self.db.runQuery("delete from name_metadata where txid=? and sd_hash=?", (old_txid, sd_hash))
        d.addCallback(lambda _: self.db.runQuery("insert into name_metadata values (?, ?, ?)",
                                                 (name, new_txid, sd_hash)))
        d.addCallback(lambda _: new_txid)
        return d

    def _get_claim_metadata_for_sd_hash(self, sd_hash):
        d = self.db.runQuery("select name, txid from name_metadata where sd_hash=?", (sd_hash,))
        d.addCallback(lambda r: r[0] if len(r) else None)
        return d
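
    # The name_metadata table caches (name, txid, sd_hash) triples seen in claims so that
    # _get_claim_metadata_for_sd_hash() can map a stream's sd hash back to the claim (name and
    # txid) that announced it, e.g. from get_name_and_validity_for_sd_hash() above.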

    ######### Must be overridden #########

    def get_balance(self):
        return defer.fail(NotImplementedError())

    def get_new_address(self):
        return defer.fail(NotImplementedError())

    def get_block(self, blockhash):
        return defer.fail(NotImplementedError())

    def get_most_recent_blocktime(self):
        return defer.fail(NotImplementedError())

    def get_best_blockhash(self):
        return defer.fail(NotImplementedError())

    def get_name_claims(self):
        return defer.fail(NotImplementedError())

    def _check_first_run(self):
        return defer.fail(NotImplementedError())

    def _get_raw_tx(self, txid):
        return defer.fail(NotImplementedError())

    def _send_name_claim(self, name, val, amount):
        return defer.fail(NotImplementedError())

    def _get_decoded_tx(self, raw_tx):
        return defer.fail(NotImplementedError())

    def _send_abandon(self, txid, address, amount):
        return defer.fail(NotImplementedError())

    def _update_name(self, txid, value, amount):
        return defer.fail(NotImplementedError())

    def _do_send_many(self, payments_to_send):
        return defer.fail(NotImplementedError())

    def _get_value_for_name(self, name):
        return defer.fail(NotImplementedError())

    def get_claims_from_tx(self, txid):
        return defer.fail(NotImplementedError())

    def _get_balance_for_address(self, address):
        return defer.fail(NotImplementedError())

    def _start(self):
        pass

    def _stop(self):
        pass


class LBRYcrdWallet(LBRYWallet):
    def __init__(self, db_dir, wallet_dir=None, wallet_conf=None, lbrycrdd_path=None):
        LBRYWallet.__init__(self, db_dir)
        self.started_lbrycrdd = False
        self.wallet_dir = wallet_dir
        self.wallet_conf = wallet_conf
        self.lbrycrdd = None
        self.lbrycrdd_path = lbrycrdd_path

        settings = self._get_rpc_conf()
        rpc_user = settings["username"]
        rpc_pass = settings["password"]
        rpc_port = settings["rpc_port"]
        rpc_url = "127.0.0.1"
        self.rpc_conn_string = "http://%s:%s@%s:%s" % (rpc_user, rpc_pass, rpc_url, str(rpc_port))

    def _start(self):
        return threads.deferToThread(self._make_connection)

    def _stop(self):
        if self.lbrycrdd_path is not None:
            return self._stop_daemon()

    def _make_connection(self):
        alert.info("Connecting to lbrycrdd...")
        if self.lbrycrdd_path is not None:
            self._start_daemon()
        self._get_info_rpc()
        log.info("Connected!")
        alert.info("Connected to lbrycrdd.")

    def _get_rpc_conf(self):
        settings = {"username": "rpcuser",
                    "password": "rpcpassword",
                    "rpc_port": 9245}
        if os.path.exists(self.wallet_conf):
            conf = open(self.wallet_conf)
            for l in conf:
                if l.startswith("rpcuser="):
                    settings["username"] = l[8:].rstrip('\n')
                if l.startswith("rpcpassword="):
                    settings["password"] = l[12:].rstrip('\n')
                if l.startswith("rpcport="):
                    settings["rpc_port"] = int(l[8:].rstrip('\n'))
        return settings
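
    # _get_rpc_conf only looks for the three keys below; a minimal wallet conf file it can parse
    # would look like (values are placeholders matching the defaults above):
    #
    #   rpcuser=rpcuser
    #   rpcpassword=rpcpassword
    #   rpcport=9245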

    def _check_first_run(self):
        d = self.get_balance()
        # a nonzero balance means this is not a first run; 2 is just a sentinel that fails the
        # num_addresses <= 1 test below
        d.addCallback(lambda bal: threads.deferToThread(self._get_num_addresses_rpc) if bal == 0 else 2)
        d.addCallback(lambda num_addresses: True if num_addresses <= 1 else False)
        return d

    def get_new_address(self):
        return threads.deferToThread(self._get_new_address_rpc)

    def get_balance(self):
        return threads.deferToThread(self._get_wallet_balance_rpc)

    def get_most_recent_blocktime(self):
        d = threads.deferToThread(self._get_best_blockhash_rpc)
        d.addCallback(lambda blockhash: threads.deferToThread(self._get_block_rpc, blockhash))
        d.addCallback(
            lambda block: block['time'] if 'time' in block else Failure(ValueError("Could not get a block time")))
        return d

    def get_name_claims(self):
        return threads.deferToThread(self._get_name_claims_rpc)

    def get_block(self, blockhash):
        return threads.deferToThread(self._get_block_rpc, blockhash)

    def get_best_blockhash(self):
        d = threads.deferToThread(self._get_blockchain_info_rpc)
        d.addCallback(lambda blockchain_info: blockchain_info['bestblockhash'])
        return d

    def get_nametrie(self):
        return threads.deferToThread(self._get_nametrie_rpc)

    def start_miner(self):
        d = threads.deferToThread(self._get_gen_status_rpc)
        d.addCallback(lambda status: threads.deferToThread(self._set_gen_status_rpc, True) if not status
                      else "Miner was already running")
        return d

    def stop_miner(self):
        d = threads.deferToThread(self._get_gen_status_rpc)
        d.addCallback(lambda status: threads.deferToThread(self._set_gen_status_rpc, False) if status
                      else "Miner wasn't running")
        return d

    def get_miner_status(self):
        return threads.deferToThread(self._get_gen_status_rpc)

    def _get_balance_for_address(self, address):
        return threads.deferToThread(self._get_balance_for_address_rpc, address)

    def _do_send_many(self, payments_to_send):
        outputs = {address: float(points) for address, points in payments_to_send.iteritems()}
        return threads.deferToThread(self._do_send_many_rpc, outputs)

    def _send_name_claim(self, name, value, amount):
        return threads.deferToThread(self._send_name_claim_rpc, name, value, amount)

    def _get_raw_tx(self, txid):
        return threads.deferToThread(self._get_raw_tx_rpc, txid)

    def _get_decoded_tx(self, raw_tx):
        return threads.deferToThread(self._get_decoded_tx_rpc, raw_tx)

    def _send_abandon(self, txid, address, amount):
        return threads.deferToThread(self._send_abandon_rpc, txid, address, amount)

    def _update_name(self, txid, value, amount):
        return threads.deferToThread(self._update_name_rpc, txid, value, amount)

    def get_claims_from_tx(self, txid):
        return threads.deferToThread(self._get_claims_from_tx_rpc, txid)

    def _get_value_for_name(self, name):
        return threads.deferToThread(self._get_value_for_name_rpc, name)

    def _get_rpc_conn(self):
        return AuthServiceProxy(self.rpc_conn_string)

    def _start_daemon(self):

        tries = 0
        try:
            rpc_conn = self._get_rpc_conn()
            try:
                rpc_conn.getinfo()
            except ValueError:
                log.exception('Failed to get rpc info. Rethrowing with a hopefully more useful error message')
                raise Exception('Failed to get rpc info from lbrycrdd. Try restarting lbrycrdd')
            log.info("lbrycrdd was already running when LBRYcrdWallet was started.")
            return
        except (socket.error, JSONRPCException):
            tries += 1
            log.info("lbrycrdd was not running when LBRYcrdWallet was started. Attempting to start it.")

        try:
            if os.name == "nt":
                si = subprocess.STARTUPINFO()
                si.dwFlags = subprocess.STARTF_USESHOWWINDOW
                si.wShowWindow = subprocess.SW_HIDE
                self.lbrycrdd = subprocess.Popen([self.lbrycrdd_path, "-datadir=%s" % self.wallet_dir,
                                                  "-conf=%s" % self.wallet_conf], startupinfo=si)
            else:
                if sys.platform == 'darwin':
                    os.chdir("/Applications/LBRY.app/Contents/Resources")
                self.lbrycrdd = subprocess.Popen([self.lbrycrdd_path, "-datadir=%s" % self.wallet_dir,
                                                  "-conf=%s" % self.wallet_conf])
            self.started_lbrycrdd = True
        except OSError:
            import traceback
            log.error("Couldn't launch lbrycrdd at path %s: %s", self.lbrycrdd_path, traceback.format_exc())
            raise ValueError("Couldn't launch lbrycrdd. Tried %s" % self.lbrycrdd_path)

        # retry the rpc connection with exponential backoff (2 ** tries seconds); the while/else
        # branch only runs when every attempt fails without breaking out of the loop
        while tries < 6:
            try:
                rpc_conn = self._get_rpc_conn()
                rpc_conn.getinfo()
                break
            except (socket.error, JSONRPCException):
                tries += 1
                log.warning("Failed to connect to lbrycrdd.")
                if tries < 6:
                    time.sleep(2 ** tries)
                    log.warning("Trying again in %d seconds", 2 ** tries)
                else:
                    log.warning("Giving up.")
        else:
            self.lbrycrdd.terminate()
            raise ValueError("Couldn't open lbrycrdd")

    def _stop_daemon(self):
        if self.lbrycrdd is not None and self.started_lbrycrdd is True:
            alert.info("Stopping lbrycrdd...")
            d = threads.deferToThread(self._stop_rpc)
            d.addCallback(lambda _: alert.info("Stopped lbrycrdd."))
            return d
        return defer.succeed(True)

    @_catch_connection_error
    def _get_balance_for_address_rpc(self, address):
        rpc_conn = self._get_rpc_conn()
        balance = rpc_conn.getreceivedbyaddress(address)
        log.debug("received balance for %s: %s", str(address), str(balance))
        return balance

    @_catch_connection_error
    def _do_send_many_rpc(self, payments):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.sendmany("", payments)

    @_catch_connection_error
    def _get_info_rpc(self):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getinfo()

    @_catch_connection_error
    def _get_name_claims_rpc(self):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.listnameclaims()

    @_catch_connection_error
    def _get_gen_status_rpc(self):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getgenerate()

    @_catch_connection_error
    def _set_gen_status_rpc(self, b):
        if b:
            log.info("Starting miner")
        else:
            log.info("Stopping miner")
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.setgenerate(b)

    @_catch_connection_error
    def _get_raw_tx_rpc(self, txid):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getrawtransaction(txid)

    @_catch_connection_error
    def _get_decoded_tx_rpc(self, raw):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.decoderawtransaction(raw)

    @_catch_connection_error
    def _send_abandon_rpc(self, txid, address, amount):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.abandonclaim(txid, address, amount)

    @_catch_connection_error
    def _get_blockchain_info_rpc(self):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getblockchaininfo()

    @_catch_connection_error
    def _get_block_rpc(self, blockhash):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getblock(blockhash)

    @_catch_connection_error
    def _get_claims_from_tx_rpc(self, txid):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getclaimsfortx(txid)

    @_catch_connection_error
    def _get_nametrie_rpc(self):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getclaimtrie()

    @_catch_connection_error
    def _get_wallet_balance_rpc(self):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getbalance("")

    @_catch_connection_error
    def _get_new_address_rpc(self):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getnewaddress()

    @_catch_connection_error
    def _get_value_for_name_rpc(self, name):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getvalueforname(name)

    def _update_name_rpc(self, txid, value, amount):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.updateclaim(txid, value, amount)

    @_catch_connection_error
    def _send_name_claim_rpc(self, name, value, amount):
        rpc_conn = self._get_rpc_conn()
        try:
            return str(rpc_conn.claimname(name, value, amount))
        except JSONRPCException as e:
            if 'message' in e.error and e.error['message'] == "Insufficient funds":
                raise InsufficientFundsError()
            elif 'message' in e.error:
                raise ValueError(e.error['message'])

    @_catch_connection_error
    def _get_num_addresses_rpc(self):
        rpc_conn = self._get_rpc_conn()
        return len(rpc_conn.getaddressesbyaccount(""))

    @_catch_connection_error
    def _get_best_blockhash_rpc(self):
        rpc_conn = self._get_rpc_conn()
        return rpc_conn.getbestblockhash()

    @_catch_connection_error
    def _stop_rpc(self):
        # check if our lbrycrdd is actually running, or if we connected to one that was already
        # running and ours failed to start
        if self.lbrycrdd.poll() is None:
            rpc_conn = self._get_rpc_conn()
            rpc_conn.stop()
            self.lbrycrdd.wait()


class LBRYumWallet(LBRYWallet):

    def __init__(self, db_dir):
        LBRYWallet.__init__(self, db_dir)
        self.config = None
        self.network = None
        self.wallet = None
        self.cmd_runner = None
        self.first_run = False
        self.printed_retrieving_headers = False
        self._start_check = None
        self._catch_up_check = None
        self._caught_up_counter = 0
        self._lag_counter = 0
        self.blocks_behind_alert = 0
        self.catchup_progress = 0
        self.max_behind = 0

    def _start(self):

        network_start_d = defer.Deferred()

        def setup_network():
            self.config = SimpleConfig()
            self.network = Network(self.config)
            alert.info("Loading the wallet...")
            return defer.succeed(self.network.start())

        d = setup_network()

        def check_started():
            if self.network.is_connecting():
                if not self.printed_retrieving_headers and self.network.blockchain.retrieving_headers:
                    alert.info("Running the wallet for the first time...this may take a moment.")
                    self.printed_retrieving_headers = True
                return False
            self._start_check.stop()
            self._start_check = None
            if self.network.is_connected():
                network_start_d.callback(True)
            else:
                network_start_d.errback(ValueError("Failed to connect to network."))

        self._start_check = task.LoopingCall(check_started)

        d.addCallback(lambda _: self._start_check.start(.1))
        d.addCallback(lambda _: network_start_d)
        d.addCallback(lambda _: self._load_wallet())
        d.addCallback(lambda _: self._get_cmd_runner())
        return d

    def _stop(self):
        if self._start_check is not None:
            self._start_check.stop()
            self._start_check = None

        if self._catch_up_check is not None:
            self._catch_up_check.stop()
            self._catch_up_check = None

        d = defer.Deferred()

        def check_stopped():
            if self.network:
                if self.network.is_connected():
                    return False
            stop_check.stop()
            self.network = None
            d.callback(True)

        if self.network:
            self.network.stop()

        stop_check = task.LoopingCall(check_stopped)
        stop_check.start(.1)
        return d

    def _load_wallet(self):

        def get_wallet():
            path = self.config.get_wallet_path()
            storage = WalletStorage(path)
            wallet = Wallet(storage)
            if not storage.file_exists:
                self.first_run = True
                seed = wallet.make_seed()
                wallet.add_seed(seed, None)
                wallet.create_master_keys(None)
                wallet.create_main_account()
                wallet.synchronize()
            self.wallet = wallet

        blockchain_caught_d = defer.Deferred()

        def check_caught_up():
            local_height = self.network.get_local_height()
            remote_height = self.network.get_server_height()

            if remote_height != 0 and remote_height - local_height <= 5:
                msg = ""
                if self._caught_up_counter != 0:
                    msg += "All caught up. "
                msg += "Wallet loaded."
                alert.info(msg)
                self._catch_up_check.stop()
                self._catch_up_check = None
                blockchain_caught_d.callback(True)

            elif remote_height != 0:
                past_blocks_behind = self.blocks_behind_alert
                self.blocks_behind_alert = remote_height - local_height
                if self.blocks_behind_alert < past_blocks_behind:
                    self._lag_counter = 0
                    self.is_lagging = False
                else:
                    self._lag_counter += 1
                    if self._lag_counter >= 900:
                        self.is_lagging = True

                if self.blocks_behind_alert > self.max_behind:
                    self.max_behind = self.blocks_behind_alert
                # use float division so the progress percentage isn't truncated to 0 or 100
                self.catchup_progress = int(100 * (self.blocks_behind_alert / float(5 + self.max_behind)))
                if self._caught_up_counter == 0:
                    alert.info('Catching up with the blockchain...showing blocks left...')
                if self._caught_up_counter % 30 == 0:
                    alert.info('%d...', (remote_height - local_height))

                self._caught_up_counter += 1

        self._catch_up_check = task.LoopingCall(check_caught_up)

        d = threads.deferToThread(get_wallet)
        d.addCallback(self._save_wallet)
        d.addCallback(lambda _: self.wallet.start_threads(self.network))
        d.addCallback(lambda _: self._catch_up_check.start(.1))
        d.addCallback(lambda _: blockchain_caught_d)
        return d

    def _get_cmd_runner(self):
        self.cmd_runner = Commands(self.config, self.wallet, self.network)

    def get_balance(self):
        cmd = known_commands['getbalance']
        func = getattr(self.cmd_runner, cmd.name)
        d = threads.deferToThread(func)
        d.addCallback(lambda result: result['unmatured'] if 'unmatured' in result else result['confirmed'])
        d.addCallback(Decimal)
        return d

    def get_new_address(self):
        d = threads.deferToThread(self.wallet.create_new_address)
        d.addCallback(self._save_wallet)
        return d

    def get_block(self, blockhash):
        cmd = known_commands['getblock']
        func = getattr(self.cmd_runner, cmd.name)
        return threads.deferToThread(func, blockhash)

    def get_most_recent_blocktime(self):
        header = self.network.get_header(self.network.get_local_height())
        return defer.succeed(header['timestamp'])

    def get_best_blockhash(self):
        height = self.network.get_local_height()
        d = threads.deferToThread(self.network.blockchain.read_header, height)
        d.addCallback(lambda header: self.network.blockchain.hash_header(header))
        return d

    def get_name_claims(self):
        cmd = known_commands['getnameclaims']
        func = getattr(self.cmd_runner, cmd.name)
        return threads.deferToThread(func)

    def _check_first_run(self):
        return defer.succeed(self.first_run)

    def _get_raw_tx(self, txid):
        cmd = known_commands['gettransaction']
        func = getattr(self.cmd_runner, cmd.name)
        return threads.deferToThread(func, txid)

    def _send_name_claim(self, name, val, amount):
        def send_claim(address):
            cmd = known_commands['claimname']
            func = getattr(self.cmd_runner, cmd.name)
            return threads.deferToThread(func, address, amount, name, val)

        d = self.get_new_address()
        d.addCallback(send_claim)
        d.addCallback(self._broadcast_transaction)
        return d

    def _get_decoded_tx(self, raw_tx):
        tx = Transaction(raw_tx)
        decoded_tx = {}
        decoded_tx['vout'] = []
        for output in tx.outputs():
            out = {}
            out['value'] = Decimal(output[2]) / Decimal(COIN)
            decoded_tx['vout'].append(out)
        return decoded_tx

    def _send_abandon(self, txid, address, amount):
        log.info("Abandon %s %s %s", str(txid), str(address), str(amount))
        cmd = known_commands['abandonclaim']
        func = getattr(self.cmd_runner, cmd.name)
        d = threads.deferToThread(func, txid, address, amount)
        d.addCallback(self._broadcast_transaction)
        return d

    def _broadcast_transaction(self, raw_tx):
        log.info("Broadcast: %s", str(raw_tx))
        cmd = known_commands['broadcast']
        func = getattr(self.cmd_runner, cmd.name)
        d = threads.deferToThread(func, raw_tx)
        d.addCallback(self._save_wallet)
        return d

    def _do_send_many(self, payments_to_send):
        log.warning("Doing send many. payments to send: %s", str(payments_to_send))
        outputs = [(TYPE_ADDRESS, address, int(amount * COIN)) for address, amount in payments_to_send.iteritems()]
        d = threads.deferToThread(self.wallet.mktx, outputs, None, self.config)
        d.addCallback(lambda tx: threads.deferToThread(self.wallet.sendtx, tx))
        d.addCallback(self._save_wallet)
        return d
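
    # lbryum expects each output as a (TYPE_ADDRESS, address, amount) tuple with the amount in the
    # smallest currency unit, hence the int(amount * COIN) conversion above.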

    def _get_value_for_name(self, name):
        cmd = known_commands['getvalueforname']
        func = getattr(self.cmd_runner, cmd.name)
        return threads.deferToThread(func, name)

    def get_claims_from_tx(self, txid):
        cmd = known_commands['getclaimsfromtx']
        func = getattr(self.cmd_runner, cmd.name)
        return threads.deferToThread(func, txid)

    def _get_balance_for_address(self, address):
        return defer.succeed(Decimal(self.wallet.get_addr_received(address)) / COIN)

    def get_nametrie(self):
        cmd = known_commands['getclaimtrie']
        func = getattr(self.cmd_runner, cmd.name)
        return threads.deferToThread(func)

    def get_history(self):
        cmd = known_commands['history']
        func = getattr(self.cmd_runner, cmd.name)
        return threads.deferToThread(func)

    def get_tx_json(self, txid):
        def _decode(raw_tx):
            tx = Transaction(raw_tx).deserialize()
            decoded_tx = {}
            for txkey in tx.keys():
                if isinstance(tx[txkey], list):
                    decoded_tx[txkey] = []
                    for i in tx[txkey]:
                        tmp = {}
                        for k in i.keys():
                            if isinstance(i[k], Decimal):
                                tmp[k] = float(i[k]) / 1e8
                            else:
                                tmp[k] = i[k]
                        decoded_tx[txkey].append(tmp)
                else:
                    decoded_tx[txkey] = tx[txkey]
            return decoded_tx

        d = self._get_raw_tx(txid)
        d.addCallback(_decode)
        return d

    def get_pub_keys(self, wallet):
        cmd = known_commands['getpubkeys']
        func = getattr(self.cmd_runner, cmd.name)
        return threads.deferToThread(func, wallet)

    def _save_wallet(self, val):
        d = threads.deferToThread(self.wallet.storage.write)
        d.addCallback(lambda _: val)
        return d


class LBRYcrdAddressRequester(object):
    implements(IRequestCreator)

    def __init__(self, wallet):
        self.wallet = wallet
        self._protocols = []

    ######### IRequestCreator #########

    def send_next_request(self, peer, protocol):

        if protocol not in self._protocols:
            r = ClientRequest({'lbrycrd_address': True}, 'lbrycrd_address')
            d = protocol.add_request(r)
            d.addCallback(self._handle_address_response, peer, r, protocol)
            d.addErrback(self._request_failed, peer)
            self._protocols.append(protocol)
            return defer.succeed(True)
        else:
            return defer.succeed(False)

    ######### internal calls #########

    def _handle_address_response(self, response_dict, peer, request, protocol):
        assert request.response_identifier in response_dict, \
            "Expected %s in dict but did not get it" % request.response_identifier
        assert protocol in self._protocols, "Responding protocol is not in our list of protocols"
        address = response_dict[request.response_identifier]
        self.wallet.update_peer_address(peer, address)

    def _request_failed(self, err, peer):
        if not err.check(RequestCanceledError):
            log.warning("A peer failed to send a valid public key response. Error: %s, peer: %s",
                        err.getErrorMessage(), str(peer))
            return err


class LBRYcrdAddressQueryHandlerFactory(object):
    implements(IQueryHandlerFactory)

    def __init__(self, wallet):
        self.wallet = wallet

    ######### IQueryHandlerFactory #########

    def build_query_handler(self):
        q_h = LBRYcrdAddressQueryHandler(self.wallet)
        return q_h

    def get_primary_query_identifier(self):
        return 'lbrycrd_address'

    def get_description(self):
        return "LBRYcrd Address - an address for receiving payments via LBRYcrd"


class LBRYcrdAddressQueryHandler(object):
    implements(IQueryHandler)

    def __init__(self, wallet):
        self.wallet = wallet
        self.query_identifiers = ['lbrycrd_address']
        self.address = None
        self.peer = None

    ######### IQueryHandler #########

    def register_with_request_handler(self, request_handler, peer):
        self.peer = peer
        request_handler.register_query_handler(self, self.query_identifiers)

    def handle_queries(self, queries):

        def create_response(address):
            self.address = address
            fields = {'lbrycrd_address': address}
            return fields

        if self.query_identifiers[0] in queries:
            d = self.wallet.get_new_address_for_peer(self.peer)
            d.addCallback(create_response)
            return d
        if self.address is None:
            log.warning("Expected a request for an address, but did not receive one")
            return defer.fail(Failure(ValueError("Expected but did not receive an address request")))
        else:
            return defer.succeed({})