2016-07-26 03:45:42 +02:00
|
|
|
import datetime
|
|
|
|
import logging
|
|
|
|
import json
|
|
|
|
import os
|
2016-02-19 06:44:08 +01:00
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
from twisted.internet import threads, reactor, defer, task
|
|
|
|
from twisted.python.failure import Failure
|
2015-10-28 06:38:01 +01:00
|
|
|
from twisted.enterprise import adbapi
|
2015-08-20 17:27:15 +02:00
|
|
|
from collections import defaultdict, deque
|
|
|
|
from zope.interface import implements
|
2016-10-14 14:42:08 +02:00
|
|
|
from jsonschema import ValidationError
|
2015-08-20 17:27:15 +02:00
|
|
|
from decimal import Decimal
|
2016-07-26 03:45:42 +02:00
|
|
|
|
|
|
|
from lbryum import SimpleConfig, Network
|
2016-12-12 20:00:23 +01:00
|
|
|
from lbryum.lbrycrd import COIN, RECOMMENDED_CLAIMTRIE_HASH_CONFIRMS
|
2016-09-26 03:54:15 +02:00
|
|
|
import lbryum.wallet
|
2016-07-26 03:45:42 +02:00
|
|
|
from lbryum.commands import known_commands, Commands
|
|
|
|
|
2016-12-01 06:28:25 +01:00
|
|
|
from lbrynet.core.sqlite_helpers import rerun_if_locked
|
2016-09-27 20:18:35 +02:00
|
|
|
from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHandler, IWallet
|
2016-07-26 03:45:42 +02:00
|
|
|
from lbrynet.core.client.ClientRequest import ClientRequest
|
|
|
|
from lbrynet.core.Error import UnknownNameError, InvalidStreamInfoError, RequestCanceledError
|
2016-11-28 20:23:10 +01:00
|
|
|
from lbrynet.db_migrator.migrate1to2 import UNSET_NOUT
|
2016-09-27 20:18:35 +02:00
|
|
|
from lbrynet.metadata.Metadata import Metadata
|
2015-08-20 17:27:15 +02:00
|
|
|
|
2015-09-08 21:42:56 +02:00
|
|
|
log = logging.getLogger(__name__)
|
2015-10-22 08:23:12 +02:00
|
|
|
alert = logging.getLogger("lbryalert." + __name__)
|
2015-09-08 21:42:56 +02:00
|
|
|
|
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
class ReservedPoints(object):
    """A record of wallet points set aside for a future payment.

    Instances are returned by Wallet.reserve_points and later handed back
    to send_points / send_points_to_address / cancel_point_reservation.
    """

    def __init__(self, identifier, amount):
        # identifier: the peer or address this reservation is earmarked for
        # amount: number of points withheld from the spendable balance
        self.identifier = identifier
        self.amount = amount
|
|
|
|
|
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
class ClaimOutpoint(dict):
    """A (txid, nout) pair identifying a claim's transaction output.

    Implemented as a dict with 'txid' and 'nout' keys so it can be compared
    directly against outpoint dicts returned by lbryum, which sometimes
    spell the output index 'nOut' instead of 'nout'.
    """

    def __init__(self, txid, nout):
        # a txid is a 64-character hex string; reject anything else early
        if len(txid) != 64:
            raise TypeError('{} is not a txid'.format(txid))
        self['txid'] = txid
        self['nout'] = nout

    def __repr__(self):
        return "{}:{}".format(self['txid'], self['nout'])

    def __eq__(self, compare):
        if isinstance(compare, dict):
            # TODO: lbryum returns nout's in dicts as "nOut" , need to fix this
            if 'nOut' in compare:
                return (self['txid'], self['nout']) == (compare['txid'], compare['nOut'])
            elif 'nout' in compare:
                return (self['txid'], self['nout']) == (compare['txid'], compare['nout'])
            # a dict with no output index cannot describe this outpoint;
            # the original fell through and implicitly returned None here
            return False
        else:
            raise TypeError('cannot compare {}'.format(type(compare)))

    def __ne__(self, compare):
        return not self.__eq__(compare)
|
2016-10-14 08:13:37 +02:00
|
|
|
|
|
|
|
|
2016-12-01 06:28:25 +01:00
|
|
|
class MetaDataStorage(object):
    """Base class for name/claim metadata persistence backends.

    Every method is asynchronous and returns a Deferred.  This default
    implementation is a no-op backend that always succeeds with True;
    subclasses (InMemoryStorage, SqliteStorage) override the methods.
    """

    def load(self):
        """Prepare the backend for use (open files/DBs, create tables)."""
        return defer.succeed(True)

    def clean_bad_records(self):
        """Remove malformed records, if the backend has any."""
        return defer.succeed(True)

    def save_name_metadata(self, name, claim_outpoint, sd_hash):
        """Record the sd_hash published at (name, claim_outpoint)."""
        return defer.succeed(True)

    def get_claim_metadata_for_sd_hash(self, sd_hash):
        """Look up the (name, txid, nout) a sd_hash was claimed under."""
        return defer.succeed(True)

    def update_claimid(self, claim_id, name, claim_outpoint):
        """Associate a claim id with (name, claim_outpoint)."""
        return defer.succeed(True)

    def get_claimid_for_tx(self, name, claim_outpoint):
        """Look up the claim id recorded for (name, claim_outpoint)."""
        return defer.succeed(True)
|
|
|
|
|
|
|
|
|
|
|
|
class InMemoryStorage(MetaDataStorage):
    """MetaDataStorage backend that keeps everything in plain dicts."""

    def __init__(self):
        # sd_hash -> (name, claim_outpoint)
        self.metadata = {}
        # (name, txid, nout) -> claim_id
        self.claimids = {}
        MetaDataStorage.__init__(self)

    def save_name_metadata(self, name, claim_outpoint, sd_hash):
        self.metadata[sd_hash] = (name, claim_outpoint)
        return defer.succeed(True)

    def get_claim_metadata_for_sd_hash(self, sd_hash):
        record = self.metadata.get(sd_hash)
        if record is None:
            return defer.succeed(None)
        name, claim_outpoint = record
        return defer.succeed((name, claim_outpoint['txid'], claim_outpoint['nout']))

    def update_claimid(self, claim_id, name, claim_outpoint):
        key = (name, claim_outpoint['txid'], claim_outpoint['nout'])
        self.claimids[key] = claim_id
        return defer.succeed(True)

    def get_claimid_for_tx(self, name, claim_outpoint):
        key = (name, claim_outpoint['txid'], claim_outpoint['nout'])
        return defer.succeed(self.claimids.get(key))
|
|
|
|
|
|
|
|
|
|
|
|
class SqliteStorage(MetaDataStorage):
    """MetaDataStorage backend persisted in <db_dir>/blockchainname.db.

    All queries run through twisted's adbapi thread pool; every method
    returns a Deferred.
    """

    def __init__(self, db_dir):
        # directory that holds (or will hold) blockchainname.db
        self.db_dir = db_dir
        # adbapi.ConnectionPool; created lazily in load()
        self.db = None
        MetaDataStorage.__init__(self)

    def load(self):
        # check_same_thread=False: adbapi issues queries from pool threads
        self.db = adbapi.ConnectionPool('sqlite3', os.path.join(self.db_dir, "blockchainname.db"),
                                        check_same_thread=False)

        def create_tables(transaction):
            # name_metadata: maps a (name, txid, n) claim outpoint to its sd_hash
            transaction.execute("create table if not exists name_metadata (" +
                                " name text, " +
                                " txid text, " +
                                " n integer, " +
                                " sd_hash text)")
            # claim_ids: claim id recorded for the claim at (name, txid, n)
            transaction.execute("create table if not exists claim_ids (" +
                                " claimId text, " +
                                " name text, " +
                                " txid text, " +
                                " n integer)")

        return self.db.runInteraction(create_tables)

    def clean_bad_records(self):
        # rows written before txids were validated may be malformed; drop them
        d = self.db.runQuery("delete from name_metadata where length(txid) > 64 or txid is null")
        return d

    def save_name_metadata(self, name, claim_outpoint, sd_hash):
        # delete-then-insert keeps the row unique; the second delete clears
        # any db-migration placeholder row written with UNSET_NOUT
        d = self.db.runQuery(
            "delete from name_metadata where name=? and txid=? and n=? and sd_hash=?",
            (name, claim_outpoint['txid'], claim_outpoint['nout'], sd_hash))
        d.addCallback(
            lambda _: self.db.runQuery(
                "delete from name_metadata where name=? and txid=? and n=? and sd_hash=?",
                (name, claim_outpoint['txid'], UNSET_NOUT, sd_hash)))
        d.addCallback(
            lambda _: self.db.runQuery(
                "insert into name_metadata values (?, ?, ?, ?)",
                (name, claim_outpoint['txid'], claim_outpoint['nout'], sd_hash)))
        return d

    @rerun_if_locked
    def get_claim_metadata_for_sd_hash(self, sd_hash):
        # NOTE(review): only this query is retried via @rerun_if_locked;
        # presumably the write paths should be wrapped too -- confirm
        d = self.db.runQuery("select name, txid, n from name_metadata where sd_hash=?", (sd_hash,))
        # return the first matching (name, txid, n) row, or None
        d.addCallback(lambda r: r[0] if r else None)
        return d

    def update_claimid(self, claim_id, name, claim_outpoint):
        # same delete-then-insert pattern as save_name_metadata, including
        # clearing the UNSET_NOUT migration placeholder
        d = self.db.runQuery(
            "delete from claim_ids where claimId=? and name=? and txid=? and n=?",
            (claim_id, name, claim_outpoint['txid'], claim_outpoint['nout']))
        d.addCallback(
            lambda _: self.db.runQuery(
                "delete from claim_ids where claimId=? and name=? and txid=? and n=?",
                (claim_id, name, claim_outpoint['txid'], UNSET_NOUT)))
        d.addCallback(
            lambda r: self.db.runQuery(
                "insert into claim_ids values (?, ?, ?, ?)",
                (claim_id, name, claim_outpoint['txid'], claim_outpoint['nout'])))
        # fire with the claim id so callers can chain on it
        d.addCallback(lambda _: claim_id)
        return d

    def get_claimid_for_tx(self, name, claim_outpoint):
        d = self.db.runQuery(
            "select claimId from claim_ids where name=? and txid=? and n=?",
            (name, claim_outpoint['txid'], claim_outpoint['nout']))
        # single scalar result, or None when unknown
        d.addCallback(lambda r: r[0][0] if r else None)
        return d
|
2015-09-22 19:06:20 +02:00
|
|
|
|
|
|
|
|
2016-09-27 20:18:35 +02:00
|
|
|
class Wallet(object):
|
|
|
|
"""This class implements the Wallet interface for the LBRYcrd payment system"""
|
|
|
|
implements(IWallet)
|
2015-08-20 17:27:15 +02:00
|
|
|
|
2016-12-01 06:28:25 +01:00
|
|
|
    def __init__(self, storage):
        """Create a wallet backed by the given MetaDataStorage instance.

        @param storage: MetaDataStorage used for name/claim metadata
        @raise ValueError: if storage is not a MetaDataStorage
        """
        if not isinstance(storage, MetaDataStorage):
            raise ValueError('storage must be an instance of MetaDataStorage')
        self._storage = storage
        # IDelayedCall for the next scheduled manage(); None while manage is
        # running or before start() has been called
        self.next_manage_call = None
        self.wallet_balance = Decimal(0.0)
        # points promised to peers but not yet paid out
        self.total_reserved_points = Decimal(0.0)
        self.peer_addresses = {}  # {Peer: string}
        self.queued_payments = defaultdict(Decimal)  # {address(string): amount(Decimal)}
        self.expected_balances = defaultdict(Decimal)  # {address(string): amount(Decimal)}
        self.current_address_given_to_peer = {}  # {Peer: address(string)}
        # (Peer, address(string), amount(Decimal), time(datetime), count(int),
        # incremental_amount(float))
        self.expected_balance_at_time = deque()
        self.max_expected_payment_time = datetime.timedelta(minutes=3)
        self.stopped = True

        self.is_lagging = None

        # guards against overlapping manage() runs
        self.manage_running = False
        self._manage_count = 0
        # refresh the balance every 3 seconds; every _batch_count-th run of
        # manage() does the full balance-check / payment pass
        self._balance_refresh_time = 3
        self._batch_count = 20
|
2015-10-29 01:59:07 +01:00
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
    def start(self):
        """Load storage, clean it, start the backend, then begin manage().

        @return: Deferred firing True once the manage loop has started
        """
        def start_manage():
            self.stopped = False
            self.manage()
            return True

        d = self._storage.load()
        d.addCallback(lambda _: self._clean_bad_records())
        # _start() is provided by the concrete wallet subclass
        d.addCallback(lambda _: self._start())
        d.addCallback(lambda _: start_manage())
        return d
|
|
|
|
|
2016-12-01 06:28:25 +01:00
|
|
|
def _clean_bad_records(self):
|
|
|
|
self._storage.clean_bad_records()
|
|
|
|
|
|
|
|
    def _save_name_metadata(self, name, claim_outpoint, sd_hash):
        """Record in storage the sd_hash published at (name, claim_outpoint)."""
        return self._storage.save_name_metadata(name, claim_outpoint, sd_hash)
|
|
|
|
|
|
|
|
    def _get_claim_metadata_for_sd_hash(self, sd_hash):
        """Look up in storage the (name, txid, nout) a sd_hash was claimed under."""
        return self._storage.get_claim_metadata_for_sd_hash(sd_hash)
|
|
|
|
|
|
|
|
    def _update_claimid(self, claim_id, name, claim_outpoint):
        """Cache in storage the claim id for (name, claim_outpoint)."""
        return self._storage.update_claimid(claim_id, name, claim_outpoint)
|
|
|
|
|
|
|
|
    def _get_claimid_for_tx(self, name, claim_outpoint):
        """Look up in storage the cached claim id for (name, claim_outpoint)."""
        return self._storage.get_claimid_for_tx(name, claim_outpoint)
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
    @staticmethod
    def log_stop_error(err):
        """Log (and swallow) a failure raised during shutdown so stop() can
        continue with the remaining teardown steps."""
        log.error("An error occurred stopping the wallet: %s", err.getTraceback())
|
|
|
|
|
|
|
|
    def stop(self):
        """Shut the wallet down, flushing pending work first.

        Runs one final full manage() pass so queued payments and expected
        balances are settled before the backend's _stop() is called.
        Errors in either step are logged, not propagated.
        """
        log.info("Stopping %s", self)
        self.stopped = True
        # If self.next_manage_call is None, then manage is currently running or else
        # start has not been called, so set stopped and do nothing else.
        if self.next_manage_call is not None:
            self.next_manage_call.cancel()
            self.next_manage_call = None

        d = self.manage(do_full=True)
        d.addErrback(self.log_stop_error)
        d.addCallback(lambda _: self._stop())
        d.addErrback(self.log_stop_error)
        return d
|
|
|
|
|
2016-03-18 01:19:13 +01:00
|
|
|
    def manage(self, do_full=False):
        """Periodic maintenance: settle payments and refresh the balance.

        Reschedules itself every _balance_refresh_time seconds.  Every
        _batch_count-th run (or when do_full=True, e.g. from stop()) it also
        checks expected balances and sends queued payments; the cheap runs
        only refresh the cached wallet balance.
        """
        self.next_manage_call = None
        # single-element list: a mutable cell so the inner closures can record
        # whether this call took ownership of the manage_running flag (and so
        # is responsible for clearing it at the end)
        have_set_manage_running = [False]
        self._manage_count += 1
        if self._manage_count % self._batch_count == 0:
            self._manage_count = 0
            do_full = True

        def check_if_manage_running():

            d = defer.Deferred()

            def fire_if_not_running():
                if self.manage_running is False:
                    self.manage_running = True
                    have_set_manage_running[0] = True
                    d.callback(True)
                elif do_full is False:
                    # a routine run can simply be skipped while another runs
                    d.callback(False)
                else:
                    # a full run must happen: poll until the other run ends
                    task.deferLater(reactor, 1, fire_if_not_running)

            fire_if_not_running()
            return d

        d = check_if_manage_running()

        def do_manage():
            if do_full:
                d = self._check_expected_balances()
                d.addCallback(lambda _: self._send_payments())
            else:
                d = defer.succeed(True)

            d.addCallback(lambda _: self.get_balance())

            def set_wallet_balance(balance):
                if self.wallet_balance != balance:
                    log.debug("Got a new balance: %s", str(balance))
                    self.wallet_balance = balance

            def log_error(err):
                # the backend may not be connected yet; keep the old balance
                if isinstance(err, AttributeError):
                    log.warning("Failed to get an updated balance")
                    log.warning("Last balance update: %s", str(self.wallet_balance))

            d.addCallbacks(set_wallet_balance, log_error)
            return d

        d.addCallback(lambda should_run: do_manage() if should_run else None)

        def set_next_manage_call():
            if not self.stopped:
                self.next_manage_call = reactor.callLater(self._balance_refresh_time, self.manage)

        d.addCallback(lambda _: set_next_manage_call())

        def log_error(err):
            log.error("Something went wrong during manage. Error message: %s",
                      err.getErrorMessage())
            # propagate so stop()'s errback sees the failure too
            return err

        d.addErrback(log_error)

        def set_manage_not_running(arg):
            # only clear the flag if this call was the one that set it
            if have_set_manage_running[0] is True:
                self.manage_running = False
            return arg

        d.addBoth(set_manage_not_running)
        return d
|
|
|
|
|
2016-02-17 17:47:39 +01:00
|
|
|
    def get_info_exchanger(self):
        """Return the request creator that negotiates payment addresses with peers."""
        return LBRYcrdAddressRequester(self)
|
|
|
|
|
|
|
|
    def get_wallet_info_query_handler_factory(self):
        """Return the factory for handlers that answer peers' address queries."""
        return LBRYcrdAddressQueryHandlerFactory(self)
|
|
|
|
|
2015-09-22 18:08:17 +02:00
|
|
|
def reserve_points(self, identifier, amount):
|
2016-11-28 20:23:10 +01:00
|
|
|
"""Ensure a certain amount of points are available to be sent as
|
|
|
|
payment, before the service is rendered
|
2015-08-20 17:27:15 +02:00
|
|
|
|
2015-09-22 18:08:17 +02:00
|
|
|
@param identifier: The peer to which the payment will ultimately be sent
|
2015-08-20 17:27:15 +02:00
|
|
|
|
|
|
|
@param amount: The amount of points to reserve
|
|
|
|
|
2016-11-28 20:23:10 +01:00
|
|
|
@return: A ReservedPoints object which is given to send_points
|
|
|
|
once the service has been rendered
|
2015-08-20 17:27:15 +02:00
|
|
|
"""
|
|
|
|
rounded_amount = Decimal(str(round(amount, 8)))
|
|
|
|
if self.wallet_balance >= self.total_reserved_points + rounded_amount:
|
|
|
|
self.total_reserved_points += rounded_amount
|
2015-09-22 18:08:17 +02:00
|
|
|
return ReservedPoints(identifier, rounded_amount)
|
2015-08-20 17:27:15 +02:00
|
|
|
return None
|
|
|
|
|
|
|
|
    def cancel_point_reservation(self, reserved_points):
        """
        Return all of the points that were reserved previously for some ReservedPoints object

        @param reserved_points: ReservedPoints previously returned by reserve_points

        @return: None
        """
        self.total_reserved_points -= reserved_points.amount
|
|
|
|
|
|
|
|
def send_points(self, reserved_points, amount):
|
|
|
|
"""
|
|
|
|
Schedule a payment to be sent to a peer
|
|
|
|
|
|
|
|
@param reserved_points: ReservedPoints object previously returned by reserve_points
|
|
|
|
|
|
|
|
@param amount: amount of points to actually send, must be less than or equal to the
|
|
|
|
amount reserved in reserved_points
|
|
|
|
|
|
|
|
@return: Deferred which fires when the payment has been scheduled
|
|
|
|
"""
|
|
|
|
rounded_amount = Decimal(str(round(amount, 8)))
|
|
|
|
peer = reserved_points.identifier
|
2017-01-25 17:44:21 +01:00
|
|
|
assert rounded_amount <= reserved_points.amount
|
|
|
|
assert peer in self.peer_addresses
|
2015-08-20 17:27:15 +02:00
|
|
|
self.queued_payments[self.peer_addresses[peer]] += rounded_amount
|
|
|
|
# make any unused points available
|
|
|
|
self.total_reserved_points -= (reserved_points.amount - rounded_amount)
|
2016-11-23 22:27:23 +01:00
|
|
|
log.debug("ordering that %s points be sent to %s", str(rounded_amount),
|
2017-01-25 17:44:21 +01:00
|
|
|
str(self.peer_addresses[peer]))
|
2015-08-20 17:27:15 +02:00
|
|
|
peer.update_stats('points_sent', amount)
|
|
|
|
return defer.succeed(True)
|
|
|
|
|
2015-09-22 18:08:17 +02:00
|
|
|
def send_points_to_address(self, reserved_points, amount):
|
|
|
|
"""
|
|
|
|
Schedule a payment to be sent to an address
|
|
|
|
|
|
|
|
@param reserved_points: ReservedPoints object previously returned by reserve_points
|
|
|
|
|
|
|
|
@param amount: amount of points to actually send. must be less than or equal to the
|
|
|
|
amount reselved in reserved_points
|
|
|
|
|
|
|
|
@return: Deferred which fires when the payment has been scheduled
|
|
|
|
"""
|
|
|
|
rounded_amount = Decimal(str(round(amount, 8)))
|
|
|
|
address = reserved_points.identifier
|
2017-01-25 17:44:21 +01:00
|
|
|
assert rounded_amount <= reserved_points.amount
|
2015-09-22 18:08:17 +02:00
|
|
|
self.queued_payments[address] += rounded_amount
|
|
|
|
self.total_reserved_points -= (reserved_points.amount - rounded_amount)
|
2016-11-23 22:27:23 +01:00
|
|
|
log.debug("Ordering that %s points be sent to %s", str(rounded_amount),
|
2017-01-25 17:44:21 +01:00
|
|
|
str(address))
|
2015-09-22 18:08:17 +02:00
|
|
|
return defer.succeed(True)
|
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
def add_expected_payment(self, peer, amount):
|
|
|
|
"""Increase the number of points expected to be paid by a peer"""
|
|
|
|
rounded_amount = Decimal(str(round(amount, 8)))
|
2017-01-25 17:44:21 +01:00
|
|
|
assert peer in self.current_address_given_to_peer
|
2015-08-20 17:27:15 +02:00
|
|
|
address = self.current_address_given_to_peer[peer]
|
2016-11-23 22:27:23 +01:00
|
|
|
log.debug("expecting a payment at address %s in the amount of %s",
|
2017-01-25 17:44:21 +01:00
|
|
|
str(address), str(rounded_amount))
|
2015-08-20 17:27:15 +02:00
|
|
|
self.expected_balances[address] += rounded_amount
|
|
|
|
expected_balance = self.expected_balances[address]
|
|
|
|
expected_time = datetime.datetime.now() + self.max_expected_payment_time
|
2016-11-28 20:23:10 +01:00
|
|
|
self.expected_balance_at_time.append(
|
|
|
|
(peer, address, expected_balance, expected_time, 0, amount))
|
2015-08-20 17:27:15 +02:00
|
|
|
peer.update_stats('expected_points', amount)
|
|
|
|
|
|
|
|
    def update_peer_address(self, peer, address):
        """Remember the payment address to use for this peer."""
        self.peer_addresses[peer] = address
|
|
|
|
|
|
|
|
def get_new_address_for_peer(self, peer):
|
|
|
|
def set_address_for_peer(address):
|
|
|
|
self.current_address_given_to_peer[peer] = address
|
|
|
|
return address
|
2017-01-25 17:44:21 +01:00
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
d = self.get_new_address()
|
2015-08-20 17:27:15 +02:00
|
|
|
d.addCallback(set_address_for_peer)
|
|
|
|
return d
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
def _send_payments(self):
|
|
|
|
payments_to_send = {}
|
|
|
|
for address, points in self.queued_payments.items():
|
2016-08-27 08:42:20 +02:00
|
|
|
if points > 0:
|
2016-11-23 22:27:23 +01:00
|
|
|
log.debug("Should be sending %s points to %s", str(points), str(address))
|
2016-08-27 08:42:20 +02:00
|
|
|
payments_to_send[address] = points
|
|
|
|
self.total_reserved_points -= points
|
|
|
|
self.wallet_balance -= points
|
|
|
|
else:
|
|
|
|
log.info("Skipping dust")
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
del self.queued_payments[address]
|
2016-08-27 08:42:20 +02:00
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
if payments_to_send:
|
2016-11-23 22:27:23 +01:00
|
|
|
log.debug("Creating a transaction with outputs %s", str(payments_to_send))
|
2016-02-19 06:44:08 +01:00
|
|
|
d = self._do_send_many(payments_to_send)
|
|
|
|
d.addCallback(lambda txid: log.debug("Sent transaction %s", txid))
|
|
|
|
return d
|
2016-08-23 01:56:42 +02:00
|
|
|
log.debug("There were no payments to send")
|
2016-02-17 05:10:26 +01:00
|
|
|
return defer.succeed(True)
|
|
|
|
|
|
|
|
    def get_stream_info_for_name(self, name):
        """Resolve name and return validated Metadata for its winning claim."""
        d = self._get_value_for_name(name)
        d.addCallback(self._get_stream_info_from_value, name)
        return d
|
|
|
|
|
2016-05-30 21:49:25 +02:00
|
|
|
def get_txid_for_name(self, name):
|
|
|
|
d = self._get_value_for_name(name)
|
|
|
|
d.addCallback(lambda r: None if 'txid' not in r else r['txid'])
|
|
|
|
return d
|
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
def get_stream_info_from_claim_outpoint(self, name, txid, nout):
|
2016-11-28 20:23:10 +01:00
|
|
|
claim_outpoint = ClaimOutpoint(txid, nout)
|
2016-10-14 08:13:37 +02:00
|
|
|
d = self.get_claims_from_tx(claim_outpoint['txid'])
|
2016-02-17 05:10:26 +01:00
|
|
|
|
|
|
|
def get_claim_for_name(claims):
|
|
|
|
for claim in claims:
|
2016-11-28 20:23:10 +01:00
|
|
|
if claim_outpoint == claim:
|
2016-02-17 05:10:26 +01:00
|
|
|
claim['txid'] = txid
|
|
|
|
return claim
|
2015-08-20 17:27:15 +02:00
|
|
|
return Failure(UnknownNameError(name))
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
d.addCallback(get_claim_for_name)
|
|
|
|
d.addCallback(self._get_stream_info_from_value, name)
|
2015-08-20 17:27:15 +02:00
|
|
|
return d
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
    def _get_stream_info_from_value(self, result, name):
        """Validate a name-resolution result and return its Metadata.

        Also records the name -> sd_hash mapping and the claim id in storage.
        Returns Failure(UnknownNameError) on lookup errors and
        Failure(InvalidStreamInfoError) when the value is not valid metadata.
        """
        def _check_result_fields(r):
            for k in ['value', 'txid', 'n', 'height', 'amount']:
                assert k in r, "getvalueforname response missing field %s" % k

        def _log_success(claim_id):
            log.debug("lbry://%s complies with %s, claimid: %s", name, metadata.version, claim_id)
            return defer.succeed(None)

        if 'error' in result:
            log.warning("Got an error looking up a name: %s", result['error'])
            return Failure(UnknownNameError(name))
        _check_result_fields(result)
        try:
            metadata = Metadata(json.loads(result['value']))
        except (TypeError, ValueError, ValidationError):
            return Failure(InvalidStreamInfoError(name, result['value']))
        sd_hash = metadata['sources']['lbry_sd_hash']
        claim_outpoint = ClaimOutpoint(result['txid'], result['n'])
        d = self._save_name_metadata(name, claim_outpoint, sd_hash)
        d.addCallback(lambda _: self.get_claimid(name, result['txid'], result['n']))
        d.addCallback(lambda cid: _log_success(cid))
        # the caller gets the validated Metadata, not the bookkeeping results
        d.addCallback(lambda _: metadata)
        return d
|
|
|
|
|
2016-08-08 07:46:19 +02:00
|
|
|
def get_claim(self, name, claim_id):
|
|
|
|
d = self.get_claims_for_name(name)
|
2017-01-06 15:04:18 +01:00
|
|
|
d.addCallback(
|
|
|
|
lambda claims: next(
|
|
|
|
claim for claim in claims['claims'] if claim['claimId'] == claim_id))
|
2016-08-05 00:44:12 +02:00
|
|
|
return d
|
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
    def get_claimid(self, name, txid, nout):
        """Return the claim id of the claim to name at (txid, nout).

        Checks local storage first; on a miss, fetches the transaction's
        claims, finds the matching one and caches its claim id.
        """
        def _get_id_for_return(claim_id):
            if claim_id:
                return defer.succeed(claim_id)
            else:
                d = self.get_claims_from_tx(txid)
                # closes over claim_outpoint, which is bound below before this
                # callback can fire
                d.addCallback(
                    lambda claims: next(
                        c for c in claims if c['name'] == name and
                        c['nOut'] == claim_outpoint['nout']))
                d.addCallback(
                    lambda claim: self._update_claimid(
                        claim['claimId'], name, ClaimOutpoint(txid, claim['nOut'])))
                return d

        claim_outpoint = ClaimOutpoint(txid, nout)
        d = self._get_claimid_for_tx(name, claim_outpoint)
        d.addCallback(_get_id_for_return)
        return d
|
|
|
|
|
2016-09-16 02:15:20 +02:00
|
|
|
def get_my_claim(self, name):
|
|
|
|
def _get_claim_for_return(claim):
|
|
|
|
if not claim:
|
|
|
|
return False
|
2016-11-09 21:19:58 +01:00
|
|
|
claim['value'] = json.loads(claim['value'])
|
2016-12-05 23:17:17 +01:00
|
|
|
return claim
|
2016-09-16 02:15:20 +02:00
|
|
|
|
2016-09-22 04:10:19 +02:00
|
|
|
def _get_my_unspent_claim(claims):
|
|
|
|
for claim in claims:
|
2017-01-06 15:04:18 +01:00
|
|
|
is_unspent = (
|
|
|
|
claim['name'] == name and
|
|
|
|
not claim['is spent'] and
|
|
|
|
not claim.get('supported_claimid', False)
|
|
|
|
)
|
|
|
|
if is_unspent:
|
2016-09-22 04:10:19 +02:00
|
|
|
return claim
|
|
|
|
return False
|
|
|
|
|
2016-09-16 02:15:20 +02:00
|
|
|
d = self.get_name_claims()
|
2016-09-22 04:10:19 +02:00
|
|
|
d.addCallback(_get_my_unspent_claim)
|
2016-09-16 02:15:20 +02:00
|
|
|
d.addCallback(_get_claim_for_return)
|
|
|
|
return d
|
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
def get_claim_info(self, name, txid=None, nout=None):
|
|
|
|
if txid is None or nout is None:
|
2016-08-08 07:46:19 +02:00
|
|
|
d = self._get_value_for_name(name)
|
2016-11-28 20:23:10 +01:00
|
|
|
d.addCallback(lambda r: self._get_claim_info(name, ClaimOutpoint(r['txid'], r['n'])))
|
|
|
|
else:
|
|
|
|
d = self._get_claim_info(name, ClaimOutpoint(txid, nout))
|
2016-08-08 07:46:19 +02:00
|
|
|
d.addErrback(lambda _: False)
|
|
|
|
return d
|
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
def _format_claim_for_return(self, name, claim, metadata=None, meta_version=None):
|
2016-09-16 02:15:20 +02:00
|
|
|
result = {}
|
|
|
|
result['claim_id'] = claim['claimId']
|
2016-09-24 06:58:54 +02:00
|
|
|
result['amount'] = claim['nEffectiveAmount']
|
2016-09-16 02:15:20 +02:00
|
|
|
result['height'] = claim['nHeight']
|
|
|
|
result['name'] = name
|
2016-10-14 08:13:37 +02:00
|
|
|
result['txid'] = claim['txid']
|
|
|
|
result['nout'] = claim['n']
|
2016-09-16 02:15:20 +02:00
|
|
|
result['value'] = metadata if metadata else json.loads(claim['value'])
|
2017-01-06 15:04:18 +01:00
|
|
|
result['supports'] = [
|
|
|
|
{'txid': support['txid'], 'n': support['n']} for support in claim['supports']]
|
|
|
|
result['meta_version'] = (
|
|
|
|
meta_version if meta_version else result['value'].get('ver', '0.0.1'))
|
2016-09-16 02:15:20 +02:00
|
|
|
return result
|
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
    def _get_claim_info(self, name, claim_outpoint):
        """Fetch full claim info for (name, claim_outpoint) and format it.

        Claims whose value fails Metadata validation are still returned,
        with meta_version set to "Non-compliant" and the raw value passed
        through unparsed.
        """
        def _build_response(claim):
            try:
                metadata = Metadata(json.loads(claim['value']))
                meta_ver = metadata.version
                sd_hash = metadata['sources']['lbry_sd_hash']
                d = self._save_name_metadata(name, claim_outpoint, sd_hash)
            except (TypeError, ValueError, ValidationError):
                metadata = claim['value']
                meta_ver = "Non-compliant"
                d = defer.succeed(None)

            d.addCallback(lambda _: self._format_claim_for_return(name,
                                                                  claim,
                                                                  metadata=metadata,
                                                                  meta_version=meta_ver))
            log.info(
                "get claim info lbry://%s metadata: %s, claimid: %s",
                name, meta_ver, claim['claimId'])
            return d

        d = self.get_claimid(name, claim_outpoint['txid'], claim_outpoint['nout'])
        d.addCallback(lambda claim_id: self.get_claim(name, claim_id))
        d.addCallback(_build_response)
        return d
|
|
|
|
|
2016-08-08 07:42:37 +02:00
|
|
|
    def get_claims_for_name(self, name):
        """Return all claims made to name (delegates to the backend)."""
        d = self._get_claims_for_name(name)
        return d
|
|
|
|
|
2016-08-07 22:13:47 +02:00
|
|
|
def update_metadata(self, new_metadata, old_metadata):
|
|
|
|
meta_for_return = old_metadata if isinstance(old_metadata, dict) else {}
|
|
|
|
for k in new_metadata:
|
|
|
|
meta_for_return[k] = new_metadata[k]
|
2016-08-09 03:32:39 +02:00
|
|
|
return defer.succeed(Metadata(meta_for_return))
|
2015-10-28 06:38:01 +01:00
|
|
|
|
2017-01-16 20:05:16 +01:00
|
|
|
def _process_claim_out(self, claim_out):
|
|
|
|
claim_out.pop('success')
|
|
|
|
claim_out['fee'] = float(claim_out['fee'])
|
|
|
|
return claim_out
|
|
|
|
|
2017-01-06 18:17:49 +01:00
|
|
|
"""
|
|
|
|
Claim a name, update if name already claimed by user
|
|
|
|
@param name: name to claim
|
2016-12-02 18:57:18 +01:00
|
|
|
|
2017-01-06 18:17:49 +01:00
|
|
|
@param bid: bid amount
|
|
|
|
|
|
|
|
@param m: metadata
|
|
|
|
|
|
|
|
@return: Deferred which returns a dict containing below items
|
|
|
|
txid - txid of the resulting transaction
|
|
|
|
nout - nout of the resulting claim
|
|
|
|
fee - transaction fee paid to make claim
|
|
|
|
claim_id - claim id of the claim
|
|
|
|
|
|
|
|
"""
|
2017-01-25 17:44:21 +01:00
|
|
|
|
2016-08-07 22:13:47 +02:00
|
|
|
def claim_name(self, name, bid, m):
|
2016-10-14 08:13:37 +02:00
|
|
|
def _save_metadata(claim_out, metadata):
|
|
|
|
if not claim_out['success']:
|
2016-11-28 20:23:10 +01:00
|
|
|
msg = 'Claim to name {} failed: {}'.format(name, claim_out['reason'])
|
2016-12-01 01:13:41 +01:00
|
|
|
raise Exception(msg)
|
2017-01-16 20:05:16 +01:00
|
|
|
claim_out = self._process_claim_out(claim_out)
|
2016-11-28 20:23:10 +01:00
|
|
|
claim_outpoint = ClaimOutpoint(claim_out['txid'], claim_out['nout'])
|
2017-01-06 15:04:18 +01:00
|
|
|
log.info("Saving metadata for claim %s %d",
|
|
|
|
claim_outpoint['txid'], claim_outpoint['nout'])
|
2016-10-14 08:13:37 +02:00
|
|
|
d = self._save_name_metadata(name, claim_outpoint, metadata['sources']['lbry_sd_hash'])
|
|
|
|
d.addCallback(lambda _: claim_out)
|
2015-10-28 06:38:01 +01:00
|
|
|
return d
|
|
|
|
|
2016-08-08 07:46:19 +02:00
|
|
|
def _claim_or_update(claim, metadata, _bid):
|
|
|
|
if not claim:
|
2016-11-23 22:27:23 +01:00
|
|
|
log.debug("No own claim yet, making a new one")
|
2016-08-29 20:33:56 +02:00
|
|
|
return self._send_name_claim(name, metadata, _bid)
|
2016-08-08 07:46:19 +02:00
|
|
|
else:
|
2016-11-23 22:27:23 +01:00
|
|
|
log.debug("Updating over own claim")
|
2016-08-09 03:32:39 +02:00
|
|
|
d = self.update_metadata(metadata, claim['value'])
|
2016-11-28 20:23:10 +01:00
|
|
|
claim_outpoint = ClaimOutpoint(claim['txid'], claim['nOut'])
|
2017-01-06 15:04:18 +01:00
|
|
|
d.addCallback(
|
|
|
|
lambda new_metadata: self._send_name_claim_update(name, claim['claim_id'],
|
|
|
|
claim_outpoint,
|
|
|
|
new_metadata, _bid))
|
2017-01-13 18:19:26 +01:00
|
|
|
d.addCallback(lambda claim_out: claim_out.update({'claim_id': claim['claim_id']}))
|
2016-08-09 03:32:39 +02:00
|
|
|
return d
|
2016-08-08 07:46:19 +02:00
|
|
|
|
|
|
|
meta = Metadata(m)
|
2016-11-01 00:08:41 +01:00
|
|
|
d = self.get_my_claim(name)
|
2016-08-08 07:46:19 +02:00
|
|
|
d.addCallback(lambda claim: _claim_or_update(claim, meta, bid))
|
2016-10-14 08:13:37 +02:00
|
|
|
d.addCallback(lambda claim_out: _save_metadata(claim_out, meta))
|
2015-10-28 06:38:01 +01:00
|
|
|
return d
|
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
def abandon_claim(self, txid, nout):
    """Abandon the claim at outpoint txid:nout.

    Returns a deferred firing with the processed claim output dict.
    Raises Exception (via the errback chain) if the abandon command
    reports failure.
    """
    def _parse_abandon_claim_out(claim_out):
        if not claim_out['success']:
            # BUG FIX: was claim_out['resason'] — a typo that raised
            # KeyError instead of reporting the real failure reason.
            msg = 'Abandon of {}:{} failed: {}'.format(txid, nout, claim_out['reason'])
            raise Exception(msg)
        claim_out = self._process_claim_out(claim_out)
        return defer.succeed(claim_out)

    claim_outpoint = ClaimOutpoint(txid, nout)
    d = self._abandon_claim(claim_outpoint)
    d.addCallback(lambda claim_out: _parse_abandon_claim_out(claim_out))
    return d
|
2016-01-26 02:28:05 +01:00
|
|
|
|
2016-08-08 08:32:56 +02:00
|
|
|
def support_claim(self, name, claim_id, amount):
    """Add a support of `amount` to claim `claim_id` for `name`.

    Returns a deferred firing with the processed claim output dict;
    fails with an Exception if the support command was unsuccessful.
    """
    def _on_support_result(result):
        if not result['success']:
            msg = 'Support of {}:{} failed: {}'.format(name, claim_id, result['reason'])
            raise Exception(msg)
        return defer.succeed(self._process_claim_out(result))

    d = self._support_claim(name, claim_id, amount)
    d.addCallback(_on_support_result)
    return d
|
2016-08-08 08:32:56 +02:00
|
|
|
|
2016-09-09 06:48:54 +02:00
|
|
|
def get_block_info(self, height):
    """Return a deferred firing with the block hash at `height`."""
    return self._get_blockhash(height)
|
|
|
|
|
2016-08-19 04:15:49 +02:00
|
|
|
def get_history(self):
    """Return a deferred firing with this wallet's transaction history."""
    return self._get_history()
|
|
|
|
|
2016-09-02 07:27:30 +02:00
|
|
|
def address_is_mine(self, address):
    """Return a deferred firing True when `address` belongs to this wallet."""
    return self._address_is_mine(address)
|
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
def get_transaction(self, txid):
    """Return a deferred firing with the transaction for `txid`."""
    return self._get_transaction(txid)
|
|
|
|
|
2016-10-03 22:37:27 +02:00
|
|
|
def get_claim_metadata_for_sd_hash(self, sd_hash):
    """Return a deferred firing with the stored claim row for `sd_hash`."""
    d = self._get_claim_metadata_for_sd_hash(sd_hash)
    return d
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
def get_name_and_validity_for_sd_hash(self, sd_hash):
    """Resolve `sd_hash` to a (name, status) pair, or None when unknown.

    Looks up the locally stored claim row for the sd hash, then asks the
    blockchain for the claim's current status.
    """
    def _check_claim(row):
        # row is (name, txid, nout) from the metadata table, or falsy
        # when no claim has been recorded for this sd hash
        if not row:
            return None
        outpoint = ClaimOutpoint(row[1], row[2])
        return self._get_status_of_claim(outpoint, row[0], sd_hash)

    d = self._get_claim_metadata_for_sd_hash(sd_hash)
    d.addCallback(_check_claim)
    return d
|
2016-01-26 02:28:05 +01:00
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
def get_available_balance(self):
    """Wallet balance minus currently reserved points, as a float."""
    reserved = self.total_reserved_points
    return float(self.wallet_balance - reserved)
|
2016-01-26 02:28:05 +01:00
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
def _get_status_of_claim(self, claim_outpoint, name, sd_hash):
    """Determine the blockchain status of the claim at `claim_outpoint`.

    Fetches all claims in the outpoint's transaction, finds the one
    matching `name`/`nout` whose value references `sd_hash`, and returns
    a deferred firing with one of:
        (name, "valid")       - the controlling claim
        (name, "invalid")     - in the claim trie but not controlling
        (name, "pending")     - queued, not yet in the trie
        (name, "unconfirmed") - known but not yet queued
    or None when no matching claim is found (or its value is unparsable).
    """
    d = self.get_claims_from_tx(claim_outpoint['txid'])

    def get_status(claims):
        if claims is None:
            claims = []
        for claim in claims:
            if 'in claim trie' in claim:
                name_is_equal = 'name' in claim and str(claim['name']) == name
                nout_is_equal = 'nOut' in claim and claim['nOut'] == claim_outpoint['nout']
                if name_is_equal and nout_is_equal and 'value' in claim:
                    try:
                        value_dict = json.loads(claim['value'])
                    except (ValueError, TypeError):
                        return None
                    claim_sd_hash = None
                    if 'stream_hash' in value_dict:
                        claim_sd_hash = str(value_dict['stream_hash'])
                    # BUG FIX: the membership test used the key
                    # 'lbrynet_sd_hash' while the read used 'lbry_sd_hash';
                    # the rest of this file stores the hash under
                    # 'lbry_sd_hash' (see _save_name_metadata), so the
                    # mismatched check could never succeed safely.
                    if 'sources' in value_dict and 'lbry_sd_hash' in value_dict['sources']:
                        claim_sd_hash = str(value_dict['sources']['lbry_sd_hash'])
                    if claim_sd_hash is not None and claim_sd_hash == sd_hash:
                        if 'is controlling' in claim and claim['is controlling']:
                            return name, "valid"
                        if claim['in claim trie']:
                            return name, "invalid"
                        if 'in queue' in claim and claim['in queue']:
                            return name, "pending"
                        return name, "unconfirmed"
        return None

    d.addCallback(get_status)
    return d
|
2016-01-26 02:28:05 +01:00
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
def _check_expected_balances(self):
    """Verify that peers who promised payments have actually paid.

    Pops every entry of `self.expected_balance_at_time` whose deadline
    has passed and checks the on-chain balance of its address. Each
    entry is a positional tuple:
        (peer, address, expected_amount, deadline, attempt_count, points)
    Peers that paid on the first check are credited score and points;
    peers that did not pay are re-queued with a new deadline (up to two
    retries) and a small penalty, then penalized heavily on the third
    failure. Returns a DeferredList covering all checks.
    """
    now = datetime.datetime.now()
    balances_to_check = []
    try:
        # entries are deadline-ordered; pop from the left until the first
        # entry whose deadline is still in the future
        while self.expected_balance_at_time[0][3] < now:
            balances_to_check.append(self.expected_balance_at_time.popleft())
    except IndexError:
        # deque exhausted — nothing (more) to check
        pass
    ds = []
    for balance_to_check in balances_to_check:
        log.debug("Checking balance of address %s", str(balance_to_check[1]))
        d = self._get_balance_for_address(balance_to_check[1])
        # NOTE(review): this lambda closes over the loop variable
        # `balance_to_check` (late binding). It is only correct if the
        # deferred fires synchronously (as defer.succeed-based
        # implementations do); with an async implementation every
        # callback would compare against the LAST entry — confirm.
        d.addCallback(lambda bal: bal >= balance_to_check[2])
        ds.append(d)
    dl = defer.DeferredList(ds)

    def handle_checks(results):
        # py2: use the iterator zip so the pairing stays lazy
        from future_builtins import zip
        for balance, (success, result) in zip(balances_to_check, results):
            peer = balance[0]
            if success is True:
                if result is False:
                    if balance[4] <= 1:  # first or second strike, give them another chance
                        # re-queue with a fresh deadline and bumped attempt count
                        new_expected_balance = (
                            balance[0],
                            balance[1],
                            balance[2],
                            datetime.datetime.now() + self.max_expected_payment_time,
                            balance[4] + 1,
                            balance[5]
                        )
                        self.expected_balance_at_time.append(new_expected_balance)
                        peer.update_score(-5.0)
                    else:
                        # third failed check: heavy penalty, entry is dropped
                        peer.update_score(-50.0)
                else:
                    if balance[4] == 0:
                        # paid on the first check: credit the promised points
                        peer.update_score(balance[5])
                        peer.update_stats('points_received', balance[5])
            else:
                # the balance lookup itself errored; log everything we know
                log.warning("Something went wrong checking a balance. Peer: %s, account: %s,"
                            "expected balance: %s, expected time: %s, count: %s, error: %s",
                            str(balance[0]), str(balance[1]), str(balance[2]), str(balance[3]),
                            str(balance[4]), str(result.getErrorMessage()))

    dl.addCallback(handle_checks)
    return dl
|
2016-01-26 02:28:05 +01:00
|
|
|
|
2017-01-25 17:44:21 +01:00
|
|
|
# ======== Must be overridden ======== #
# Abstract interface: every method below must be implemented by a concrete
# wallet backend (e.g. LBRYumWallet). Each stub fails with
# NotImplementedError through the deferred errback chain so that callers
# using a non-overridden method get an asynchronous, catchable error
# rather than a synchronous crash.

def get_balance(self):
    # total wallet balance
    return defer.fail(NotImplementedError())

def get_new_address(self):
    # fresh receiving address
    return defer.fail(NotImplementedError())

def get_block(self, blockhash):
    return defer.fail(NotImplementedError())

def get_most_recent_blocktime(self):
    return defer.fail(NotImplementedError())

def get_best_blockhash(self):
    return defer.fail(NotImplementedError())

def get_name_claims(self):
    # claims made by this wallet
    return defer.fail(NotImplementedError())

def _get_claims_for_name(self, name):
    return defer.fail(NotImplementedError())

def _send_name_claim(self, name, val, amount):
    return defer.fail(NotImplementedError())

def _abandon_claim(self, claim_outpoint):
    return defer.fail(NotImplementedError())

def _send_name_claim_update(self, name, claim_id, claim_outpoint, value, amount):
    return defer.fail(NotImplementedError())

def _support_claim(self, name, claim_id, amount):
    return defer.fail(NotImplementedError())

def _do_send_many(self, payments_to_send):
    return defer.fail(NotImplementedError())

def _get_value_for_name(self, name):
    return defer.fail(NotImplementedError())

def get_claims_from_tx(self, txid):
    return defer.fail(NotImplementedError())

def _get_balance_for_address(self, address):
    return defer.fail(NotImplementedError())

def _get_history(self):
    return defer.fail(NotImplementedError())

def _address_is_mine(self, address):
    return defer.fail(NotImplementedError())

def _start(self):
    # backend startup hook; no-op by default
    pass

def _stop(self):
    # backend shutdown hook; no-op by default
    pass
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
|
2016-09-27 20:18:35 +02:00
|
|
|
class LBRYumWallet(Wallet):
    """Wallet backend built on lbryum (the LBRY fork of Electrum).

    Drives an lbryum Network/Wallet pair from Twisted: startup, blockchain
    catch-up, and all claim/transaction operations are exposed as
    deferreds. Commands are dispatched through lbryum's `known_commands`
    registry via `_run_cmd_as_defer_succeed` / `_run_cmd_as_defer_to_thread`.
    """

    def __init__(self, storage, config=None):
        Wallet.__init__(self, storage)
        self._config = config
        self.network = None
        self.wallet = None
        # True when a brand-new wallet file was created on this run
        self.is_first_run = False
        self.printed_retrieving_headers = False
        # LoopingCalls polling for network start / blockchain catch-up
        self._start_check = None
        self._catch_up_check = None
        self._caught_up_counter = 0
        self._lag_counter = 0
        # catch-up progress bookkeeping
        self.blocks_behind = 0
        self.catchup_progress = 0
        self.max_behind = 0

    def _start(self):
        """Start the lbryum network, load the wallet, and wait for a
        connection, then kick off blockchain catch-up.

        Returns a deferred that fires once the blockchain is caught up.
        """
        network_start_d = defer.Deferred()

        def setup_network():
            self.config = make_config(self._config)
            self.network = Network(self.config)
            alert.info("Loading the wallet")
            return defer.succeed(self.network.start())

        d = setup_network()

        def check_started():
            # polled every 100ms until the network either connects or fails
            if self.network.is_connecting():
                if not self.printed_retrieving_headers and \
                        self.network.blockchain.retrieving_headers:
                    alert.info("Running the wallet for the first time. This may take a moment.")
                    self.printed_retrieving_headers = True
                return False
            self._start_check.stop()
            self._start_check = None
            if self.network.is_connected():
                network_start_d.callback(True)
            else:
                network_start_d.errback(ValueError("Failed to connect to network."))

        self._start_check = task.LoopingCall(check_started)

        d.addCallback(lambda _: self._load_wallet())
        d.addCallback(self._save_wallet)
        d.addCallback(lambda _: self._start_check.start(.1))
        d.addCallback(lambda _: network_start_d)
        d.addCallback(lambda _: self._load_blockchain())
        return d

    def _stop(self):
        """Stop polling loops and the network; returns a deferred that
        fires once the network reports it is disconnected."""
        if self._start_check is not None:
            self._start_check.stop()
            self._start_check = None

        if self._catch_up_check is not None:
            if self._catch_up_check.running:
                self._catch_up_check.stop()
            self._catch_up_check = None

        d = defer.Deferred()

        def check_stopped():
            # polled until the network has fully disconnected
            if self.network:
                if self.network.is_connected():
                    return False
            stop_check.stop()
            self.network = None
            d.callback(True)

        if self.network:
            self.network.stop()

        stop_check = task.LoopingCall(check_stopped)
        stop_check.start(.1)
        return d

    def _load_wallet(self):
        """Open (or create and seed) the lbryum wallet file."""
        path = self.config.get_wallet_path()
        storage = lbryum.wallet.WalletStorage(path)
        wallet = lbryum.wallet.Wallet(storage)
        if not storage.file_exists:
            # brand-new wallet: generate a seed and a default account
            self.is_first_run = True
            seed = wallet.make_seed()
            wallet.add_seed(seed, None)
            wallet.create_master_keys(None)
            wallet.create_main_account()
            wallet.synchronize()
        self.wallet = wallet
        self._check_large_wallet()
        return defer.succeed(True)

    def _check_large_wallet(self):
        """Warn when the wallet holds an excessive number of addresses."""
        if len(self.wallet.addresses(include_change=False)) > 1000:
            log.warning("Your wallet is excessively large, please follow instructions here: \
                        https://github.com/lbryio/lbry/issues/437 to reduce your wallet size")

    def _load_blockchain(self):
        """Start wallet threads and poll until the local chain catches up
        with the server height (within 5 blocks).

        Returns a deferred that fires True once caught up.
        """
        blockchain_caught_d = defer.Deferred()

        def check_caught_up():
            local_height = self.network.get_catchup_progress()
            remote_height = self.network.get_server_height()

            if remote_height == 0:
                # server height unknown yet; try again on the next tick
                return

            height_diff = remote_height - local_height

            if height_diff <= 5:
                # close enough to the tip: declare the wallet loaded
                self.blocks_behind = 0
                msg = ""
                if self._caught_up_counter != 0:
                    msg += "All caught up. "
                msg += "Wallet loaded."
                alert.info(msg)
                self._catch_up_check.stop()
                self._catch_up_check = None
                blockchain_caught_d.callback(True)
                return

            if height_diff < self.blocks_behind:
                # We're making progress in catching up
                self._lag_counter = 0
                self.is_lagging = False
            else:
                # No progress. Might be lagging
                self._lag_counter += 1
                if self._lag_counter >= 900:
                    self.is_lagging = True

            self.blocks_behind = height_diff

            if self.blocks_behind > self.max_behind:
                self.max_behind = self.blocks_behind
            self.catchup_progress = int(100 * (self.blocks_behind / (5 + self.max_behind)))
            if self._caught_up_counter == 0:
                alert.info('Catching up with the blockchain')
            if self._caught_up_counter % 30 == 0:
                # roughly every 3 seconds at the 100ms poll interval
                alert.info('Blocks left: %d', (remote_height - local_height))

            self._caught_up_counter += 1

        def log_error(err):
            log.warning(err.getErrorMessage())
            return defer.fail(err)

        self._catch_up_check = task.LoopingCall(check_caught_up)
        d = defer.succeed(self.wallet.start_threads(self.network))
        d.addCallback(lambda _: self._catch_up_check.start(.1))
        d.addErrback(log_error)
        d.addCallback(lambda _: blockchain_caught_d)
        return d

    def _get_cmd_runner(self):
        """Build a fresh lbryum Commands object bound to this wallet."""
        return Commands(self.config, self.wallet, self.network)

    # Run a command synchronously and wrap the result in defer.succeed.
    # lbryum commands should be run this way unless the command only makes
    # an lbryum server query, in which case use _run_cmd_as_defer_to_thread().
    def _run_cmd_as_defer_succeed(self, command_name, *args):
        cmd_runner = self._get_cmd_runner()
        cmd = known_commands[command_name]
        func = getattr(cmd_runner, cmd.name)
        return defer.succeed(func(*args))

    # Run a command in a thread via deferToThread; lbryum commands that only
    # query the lbryum server should be run this way so they don't block
    # the reactor.
    # TODO: keep track of running threads and cancel them on `stop`
    # otherwise the application will hang, waiting for threads to complete
    def _run_cmd_as_defer_to_thread(self, command_name, *args):
        cmd_runner = self._get_cmd_runner()
        cmd = known_commands[command_name]
        func = getattr(cmd_runner, cmd.name)
        return threads.deferToThread(func, *args)

    def get_balance(self):
        """Deferred firing with confirmed + unconfirmed balance as Decimal,
        excluding claimtrie transactions."""
        accounts = None
        exclude_claimtrietx = True
        d = self._run_cmd_as_defer_succeed('getbalance', accounts, exclude_claimtrietx)
        d.addCallback(
            lambda result: Decimal(result['confirmed']) + Decimal(result.get('unconfirmed', 0.0)))
        return d

    def get_new_address(self):
        """Deferred firing with an unused receiving address (creating one
        if necessary); persists the wallet afterwards."""
        addr = self.wallet.get_unused_address(account=None)
        if addr is None:
            addr = self.wallet.create_new_address()
        d = defer.succeed(addr)
        d.addCallback(self._save_wallet)
        return d

    def get_block(self, blockhash):
        return self._run_cmd_as_defer_to_thread('getblock', blockhash)

    def get_most_recent_blocktime(self):
        """Deferred firing with the timestamp of the local chain tip."""
        header = self.network.get_header(self.network.get_local_height())
        return defer.succeed(header['timestamp'])

    def get_best_blockhash(self):
        """Deferred firing with the hash of the local chain tip."""
        height = self.network.get_local_height()
        header = self.network.blockchain.read_header(height)
        return defer.succeed(self.network.blockchain.hash_header(header))

    def _get_blockhash(self, height):
        """Deferred firing with the block hash at `height`."""
        header = self.network.blockchain.read_header(height)
        return defer.succeed(self.network.blockchain.hash_header(header))

    def get_name_claims(self):
        return self._run_cmd_as_defer_succeed('getnameclaims')

    def _get_claims_for_name(self, name):
        return self._run_cmd_as_defer_to_thread('getclaimsforname', name)

    def _send_name_claim(self, name, val, amount):
        """Create (unsigned-broadcast) a new name claim, then broadcast it."""
        broadcast = False
        log.debug("Name claim %s %s %f", name, val, amount)
        d = self._run_cmd_as_defer_succeed('claim', name, json.dumps(val), amount, broadcast)
        d.addCallback(lambda claim_out: self._broadcast_claim_transaction(claim_out))
        return d

    def _send_name_claim_update(self, name, claim_id, claim_outpoint, value, amount):
        """Update an existing claim in place, then broadcast it."""
        metadata = json.dumps(value)
        log.debug("Update %s %d %f %s %s '%s'", claim_outpoint['txid'], claim_outpoint['nout'],
                  amount, name, claim_id, metadata)
        broadcast = False
        d = self._run_cmd_as_defer_succeed('update', claim_outpoint['txid'], claim_outpoint['nout'],
                                           name, claim_id, metadata, amount, broadcast)
        d.addCallback(lambda claim_out: self._broadcast_claim_transaction(claim_out))
        return d

    def _abandon_claim(self, claim_outpoint):
        """Abandon the claim at `claim_outpoint`, then broadcast."""
        # lazy %-args instead of eager "%" formatting, matching the other
        # log calls in this class
        log.debug("Abandon %s %s", claim_outpoint['txid'], claim_outpoint['nout'])
        broadcast = False
        d = self._run_cmd_as_defer_succeed('abandon', claim_outpoint['txid'],
                                           claim_outpoint['nout'], broadcast)
        d.addCallback(lambda claim_out: self._broadcast_claim_transaction(claim_out))
        return d

    def _support_claim(self, name, claim_id, amount):
        """Create a support for `claim_id`, then broadcast."""
        log.debug("Support %s %s %f", name, claim_id, amount)
        broadcast = False
        d = self._run_cmd_as_defer_succeed('support', name, claim_id, amount, broadcast)
        d.addCallback(lambda claim_out: self._broadcast_claim_transaction(claim_out))
        return d

    def _broadcast_claim_transaction(self, claim_out):
        """Broadcast the raw tx from a successful claim command; pass an
        unsuccessful claim_out through unchanged so callers can report it."""
        if 'success' not in claim_out:
            raise Exception('Unexpected claim command output:{}'.format(claim_out))
        if claim_out['success']:
            d = self._broadcast_transaction(claim_out['tx'])
            d.addCallback(lambda _: claim_out)
            return d
        else:
            return defer.succeed(claim_out)

    def _broadcast_transaction(self, raw_tx):
        """Broadcast `raw_tx`; fires with the txid or fails with
        'Transaction rejected' when the server response is not a txid."""
        def _log_tx(r):
            log.debug("Broadcast tx: %s", r)
            return r

        d = self._run_cmd_as_defer_to_thread('broadcast', raw_tx)
        d.addCallback(_log_tx)
        # a valid txid is 64 hex chars; anything else is an error string
        d.addCallback(
            lambda r: r if len(r) == 64 else defer.fail(Exception("Transaction rejected")))
        return d

    def _do_send_many(self, payments_to_send):
        """Pay several addresses in one transaction and broadcast it."""
        def broadcast_send_many(paytomany_out):
            if 'hex' not in paytomany_out:
                # BUG FIX: error message previously misspelled 'Unepxected'
                raise Exception('Unexpected paytomany output:{}'.format(paytomany_out))
            return self._broadcast_transaction(paytomany_out['hex'])

        log.debug("Doing send many. payments to send: %s", str(payments_to_send))
        d = self._run_cmd_as_defer_succeed('paytomany', payments_to_send.iteritems())
        d.addCallback(lambda out: broadcast_send_many(out))
        return d

    def _get_value_for_name(self, name):
        """Request the claim value for `name` with a merkle proof against a
        recent (RECOMMENDED_CLAIMTRIE_HASH_CONFIRMS-deep) block, and verify
        the proof locally."""
        block_header = self.network.blockchain.read_header(
            self.network.get_local_height() - RECOMMENDED_CLAIMTRIE_HASH_CONFIRMS + 1)
        block_hash = self.network.blockchain.hash_header(block_header)
        d = self._run_cmd_as_defer_to_thread('requestvalueforname', name, block_hash)
        d.addCallback(lambda response: Commands._verify_proof(name, block_header['claim_trie_root'],
                                                              response))
        return d

    def get_claims_from_tx(self, txid):
        return self._run_cmd_as_defer_to_thread('getclaimsfromtx', txid)

    def _get_balance_for_address(self, address):
        """Deferred firing with the LBC received by `address` as Decimal."""
        return defer.succeed(Decimal(self.wallet.get_addr_received(address)) / COIN)

    def get_nametrie(self):
        return self._run_cmd_as_defer_to_thread('getclaimtrie')

    def _get_history(self):
        return self._run_cmd_as_defer_succeed('history')

    def _address_is_mine(self, address):
        return self._run_cmd_as_defer_succeed('ismine', address)

    def get_pub_keys(self, wallet):
        # BUG FIX: command name was misspelled 'getpubkyes', which raises
        # KeyError in known_commands. lbryum registers it as 'getpubkeys'
        # — TODO confirm against the pinned lbryum version.
        return self._run_cmd_as_defer_succeed('getpubkeys', wallet)

    def _save_wallet(self, val):
        """Persist the wallet file; passes `val` through for chaining."""
        self.wallet.storage.write()
        return defer.succeed(val)
|
|
|
|
|
2016-02-19 06:44:08 +01:00
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
class LBRYcrdAddressRequester(object):
    """Asks each connected peer for an LBRYcrd payment address once and
    records the response on the wallet."""
    implements([IRequestCreator])

    def __init__(self, wallet):
        self.wallet = wallet
        self._protocols = []

    # ======== IRequestCreator ======== #

    def send_next_request(self, peer, protocol):
        """Send one address request per protocol.

        Fires True when a request was sent, False when this protocol was
        already asked.
        """
        if protocol in self._protocols:
            return defer.succeed(False)
        request = ClientRequest({'lbrycrd_address': True}, 'lbrycrd_address')
        d = protocol.add_request(request)
        d.addCallback(self._handle_address_response, peer, request, protocol)
        d.addErrback(self._request_failed, peer)
        self._protocols.append(protocol)
        return defer.succeed(True)

    # ======== internal calls ======== #

    def _handle_address_response(self, response_dict, peer, request, protocol):
        """Record the address a peer sent back on the wallet."""
        assert request.response_identifier in response_dict, \
            "Expected %s in dict but did not get it" % request.response_identifier
        assert protocol in self._protocols, "Responding protocol is not in our list of protocols"
        address = response_dict[request.response_identifier]
        self.wallet.update_peer_address(peer, address)

    def _request_failed(self, err, peer):
        """Log unexpected failures; swallow cancellations."""
        if not err.check(RequestCanceledError):
            log.warning("A peer failed to send a valid public key response. Error: %s, peer: %s",
                        err.getErrorMessage(), str(peer))
            return err
|
2015-08-20 17:27:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
class LBRYcrdAddressQueryHandlerFactory(object):
    """Builds LBRYcrdAddressQueryHandler instances bound to one wallet."""
    implements(IQueryHandlerFactory)

    def __init__(self, wallet):
        self.wallet = wallet

    # ======== IQueryHandlerFactory ======== #

    def build_query_handler(self):
        """Create a new handler that shares this factory's wallet."""
        return LBRYcrdAddressQueryHandler(self.wallet)

    def get_primary_query_identifier(self):
        return 'lbrycrd_address'

    def get_description(self):
        return "LBRYcrd Address - an address for receiving payments via LBRYcrd"
|
|
|
|
|
|
|
|
class LBRYcrdAddressQueryHandler(object):
    """Answers 'lbrycrd_address' queries with a fresh payment address for
    the requesting peer."""
    implements(IQueryHandler)

    def __init__(self, wallet):
        self.wallet = wallet
        self.query_identifiers = ['lbrycrd_address']
        self.address = None
        self.peer = None

    # ======== IQueryHandler ======== #

    def register_with_request_handler(self, request_handler, peer):
        """Remember the peer and register for our query identifiers."""
        self.peer = peer
        request_handler.register_query_handler(self, self.query_identifiers)

    def handle_queries(self, queries):
        """Return a deferred firing with the response fields for `queries`.

        On an address query, allocate an address for the peer and remember
        it. With no address query, fail unless an address was already sent.
        """
        def _remember_and_respond(address):
            self.address = address
            return {'lbrycrd_address': address}

        if self.query_identifiers[0] in queries:
            d = self.wallet.get_new_address_for_peer(self.peer)
            d.addCallback(_remember_and_respond)
            return d
        if self.address is None:
            log.warning("Expected a request for an address, but did not receive one")
            return defer.fail(
                Failure(ValueError("Expected but did not receive an address request")))
        else:
            return defer.succeed({})
|
2016-10-04 20:58:44 +02:00
|
|
|
|
|
|
|
|
|
|
|
def make_config(config=None):
    """Return an lbryum SimpleConfig.

    Dicts (or None, treated as an empty dict) are wrapped in a new
    SimpleConfig; any other config object is passed through unchanged.
    """
    if config is None:
        config = {}
    if isinstance(config, dict):
        return SimpleConfig(config)
    return config
|