2016-07-26 03:45:42 +02:00
|
|
|
import datetime
|
|
|
|
import logging
|
|
|
|
import os
|
2017-04-24 00:16:03 +02:00
|
|
|
import json
|
|
|
|
import time
|
2016-02-19 06:44:08 +01:00
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
from twisted.internet import threads, reactor, defer, task
|
|
|
|
from twisted.python.failure import Failure
|
2015-10-28 06:38:01 +01:00
|
|
|
from twisted.enterprise import adbapi
|
2017-05-10 07:26:20 +02:00
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
from collections import defaultdict, deque
|
|
|
|
from zope.interface import implements
|
|
|
|
from decimal import Decimal
|
2016-07-26 03:45:42 +02:00
|
|
|
|
|
|
|
from lbryum import SimpleConfig, Network
|
2017-04-07 02:42:07 +02:00
|
|
|
from lbryum.lbrycrd import COIN
|
2016-09-26 03:54:15 +02:00
|
|
|
import lbryum.wallet
|
2016-07-26 03:45:42 +02:00
|
|
|
from lbryum.commands import known_commands, Commands
|
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
from lbryschema.uri import parse_lbry_uri
|
2017-04-03 21:58:20 +02:00
|
|
|
from lbryschema.claim import ClaimDict
|
|
|
|
from lbryschema.error import DecodeError
|
2017-04-09 22:10:07 +02:00
|
|
|
from lbryschema.decode import smart_decode
|
2017-04-03 21:58:20 +02:00
|
|
|
|
2016-12-01 06:28:25 +01:00
|
|
|
from lbrynet.core.sqlite_helpers import rerun_if_locked
|
2016-09-27 20:18:35 +02:00
|
|
|
from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHandler, IWallet
|
2016-07-26 03:45:42 +02:00
|
|
|
from lbrynet.core.client.ClientRequest import ClientRequest
|
2017-04-09 22:10:07 +02:00
|
|
|
from lbrynet.core.Error import RequestCanceledError, InsufficientFundsError, UnknownNameError
|
2015-08-20 17:27:15 +02:00
|
|
|
|
2015-09-08 21:42:56 +02:00
|
|
|
log = logging.getLogger(__name__)


# Seconds a cached claim / uri entry stays valid before callers must
# re-fetch it (see CachedClaim.response_dict and the uri cache queries).
CLAIM_CACHE_TIME = 600
|
|
|
|
|
2015-09-08 21:42:56 +02:00
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
class ReservedPoints(object):
    """A hold on wallet points set aside for a pending payment.

    Instances are produced by ``Wallet.reserve_points`` and later consumed
    by ``send_points``/``send_points_to_address`` or cancelled via
    ``cancel_point_reservation``.
    """

    def __init__(self, identifier, amount):
        # identifier: the peer (or address) this reservation is intended for
        self.identifier = identifier
        # amount: points withheld from the spendable balance
        self.amount = amount
|
|
|
|
|
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
class ClaimOutpoint(dict):
    """A claim transaction outpoint, stored as a dict with 'txid' and 'nout' keys."""

    def __init__(self, txid, nout):
        # a txid is always a 64 character hex string
        if len(txid) != 64:
            raise TypeError('{} is not a txid'.format(txid))
        dict.__init__(self, txid=txid, nout=nout)

    def __repr__(self):
        return "{}:{}".format(self['txid'], self['nout'])

    def __eq__(self, compare):
        if isinstance(compare, dict):
            own = (self['txid'], self['nout'])
            # TODO: lbryum returns nout's in dicts as "nOut" , need to fix this
            if 'nOut' in compare:
                return own == (compare['txid'], compare['nOut'])
            elif 'nout' in compare:
                return own == (compare['txid'], compare['nout'])
        elif isinstance(compare, (str, unicode)):
            # string form compares against the "txid:nout" representation
            return compare == self.__repr__()
        else:
            raise TypeError('cannot compare {}'.format(type(compare)))

    def __ne__(self, compare):
        return not self.__eq__(compare)
|
2016-10-14 08:13:37 +02:00
|
|
|
|
|
|
|
|
2017-04-24 00:16:03 +02:00
|
|
|
class CachedClaim(object):
    """A locally cached claim row reconstructed from storage columns.

    NOTE(review): the ``channal_name`` parameter name (sic) is kept as-is
    for backward compatibility; it populates ``self.channel_name``.
    """

    def __init__(self, claim_id, claim, claim_sequence, address, height, amount, supports,
                 channal_name, signature_is_valid, cache_timestamp, name, txid, nout):
        self.claim_id = claim_id
        self.claim = claim
        self.claim_sequence = claim_sequence
        self.address = address
        self.height = height
        self.amount = amount
        # supports arrive as a JSON string (or a falsy placeholder meaning none)
        self.supports = json.loads(supports) if supports else []
        self.effective_amount = self.amount + sum(s['amount'] for s in self.supports)
        self.channel_name = channal_name
        self.signature_is_valid = signature_is_valid
        self.cache_timestamp = cache_timestamp
        self.name = name
        self.txid = txid
        self.nout = nout

    def response_dict(self, check_expires=True):
        """Render the claim as a response dict, or None if the cache entry expired."""
        age = time.time() - int(self.cache_timestamp)
        if check_expires and age > CLAIM_CACHE_TIME:
            return
        result = {
            "height": self.height,
            "address": self.address,
            "claim_id": self.claim_id,
            "claim_sequence": self.claim_sequence,
            "effective_amount": self.effective_amount,
            "has_signature": self.claim.has_signature,
            "name": self.name,
            "hex": self.claim.serialized.encode('hex'),
            "value": self.claim.claim_dict,
            "txid": self.txid,
            "amount": self.amount,
            "decoded_claim": True,
            "supports": self.supports,
            "nout": self.nout,
        }
        # channel / signature fields are optional and only added when known
        if self.channel_name is not None:
            result['channel_name'] = self.channel_name
        if self.signature_is_valid is not None:
            result['signature_is_valid'] = bool(self.signature_is_valid)
        return result
|
|
|
|
|
|
|
|
|
2016-12-01 06:28:25 +01:00
|
|
|
class MetaDataStorage(object):
    """No-op base class defining the claim/metadata storage interface.

    Subclasses (InMemoryStorage, SqliteStorage) override these methods;
    every method returns a Deferred so callers treat all backends uniformly.
    """

    def load(self):
        # nothing to initialize in the base implementation
        return defer.succeed(True)

    def save_name_metadata(self, name, claim_outpoint, sd_hash):
        return defer.succeed(True)

    def get_claim_metadata_for_sd_hash(self, sd_hash):
        return defer.succeed(True)

    def update_claimid(self, claim_id, name, claim_outpoint):
        return defer.succeed(True)

    def get_claimid_for_tx(self, claim_outpoint):
        return defer.succeed(True)

    @defer.inlineCallbacks
    def get_cached_claim(self, claim_id, check_expire=True):
        """Look up a cached claim and convert it to a response dict (or None)."""
        cache_info = yield self._get_cached_claim(claim_id)
        if not cache_info:
            defer.returnValue(None)
        cached_claim = CachedClaim(claim_id, *cache_info)
        defer.returnValue(cached_claim.response_dict(check_expires=check_expire))

    def _get_cached_claim(self, claim_id):
        return defer.succeed(None)

    def save_claim_to_cache(self, claim_id, claim_sequence, claim, claim_address, height, amount,
                            supports, channel_name, signature_is_valid):
        return defer.succeed(True)

    def save_claim_to_uri_cache(self, uri, claim_id, certificate_id=None):
        return defer.succeed(None)

    def get_cached_claim_for_uri(self, uri, check_expire=True):
        return defer.succeed(None)
|
|
|
|
|
2016-12-01 06:28:25 +01:00
|
|
|
|
|
|
|
class InMemoryStorage(MetaDataStorage):
    """MetaDataStorage backend that keeps all caches in plain dicts.

    Lightweight stand-in for SqliteStorage; each dict mirrors a table in
    the sqlite backend.
    """

    def __init__(self):
        self.metadata = {}      # {sd_hash: (name, claim_outpoint)}
        self.claimids = {}      # {(name, txid, nout): claim_id}
        self.claim_dicts = {}   # {claim_id: cached claim column tuple}
        self.uri_cache = {}     # {uri: (claim_id, certificate_id)}
        MetaDataStorage.__init__(self)

    def save_name_metadata(self, name, claim_outpoint, sd_hash):
        self.metadata[sd_hash] = (name, claim_outpoint)
        return defer.succeed(True)

    def get_claim_metadata_for_sd_hash(self, sd_hash):
        try:
            name, claim_outpoint = self.metadata[sd_hash]
            return defer.succeed((name, claim_outpoint['txid'], claim_outpoint['nout']))
        except KeyError:
            return defer.succeed(None)

    def update_claimid(self, claim_id, name, claim_outpoint):
        self.claimids[(name, claim_outpoint['txid'], claim_outpoint['nout'])] = claim_id
        return defer.succeed(True)

    def get_claimid_for_tx(self, claim_outpoint):
        # linear scan: keys are (name, txid, nout) tuples
        result = None
        for k, claim_id in self.claimids.iteritems():
            if k[1] == claim_outpoint['txid'] and k[2] == claim_outpoint['nout']:
                result = claim_id
                break

        return defer.succeed(result)

    def _get_cached_claim(self, claim_id):
        claim_cache = self.claim_dicts.get(claim_id, None)
        claim_tx_cache = None
        for k, v in self.claimids.iteritems():
            if v == claim_id:
                claim_tx_cache = k
                break

        if claim_cache and claim_tx_cache:
            # concatenate cached columns with (name, txid, nout) to form
            # the positional args CachedClaim expects after claim_id
            cached_claim_args = tuple(claim_cache) + tuple(claim_tx_cache)
            return defer.succeed(cached_claim_args)
        return defer.succeed(None)

    def save_claim_to_cache(self, claim_id, claim_sequence, claim, claim_address, height, amount,
                            supports, channel_name, signature_is_valid):
        self.claim_dicts[claim_id] = (claim, claim_sequence, claim_address, height, amount,
                                      supports, channel_name, signature_is_valid, int(time.time()))
        return defer.succeed(True)

    def save_claim_to_uri_cache(self, uri, claim_id, certificate_id=None):
        self.uri_cache[uri] = (claim_id, certificate_id)
        return defer.succeed(None)

    @defer.inlineCallbacks
    def get_cached_claim_for_uri(self, uri, check_expire=True):
        result = self.uri_cache.get(uri, None)
        response = None
        if result:
            claim_id, certificate_id = result
            claim_response = yield self.get_cached_claim(claim_id, check_expire)
            if claim_response:
                # Wrap the claim exactly like SqliteStorage.get_cached_claim_for_uri
                # does, so callers get a consistent shape from both backends.
                # (Previously this returned the bare claim dict and then did
                # `certificate['claim']`, which always raised KeyError because
                # CachedClaim.response_dict never produces a 'claim' key.)
                response = {"claim": claim_response}
                if certificate_id:
                    certificate = yield self.get_cached_claim(certificate_id, check_expire)
                    response['certificate'] = certificate
        defer.returnValue(response)
|
2016-12-01 06:28:25 +01:00
|
|
|
|
|
|
|
|
|
|
|
class SqliteStorage(MetaDataStorage):
    """MetaDataStorage backend persisted in <db_dir>/blockchainname.db.

    All queries run through an adbapi ConnectionPool and are retried via
    @rerun_if_locked when sqlite reports the database as locked.
    """

    def __init__(self, db_dir):
        self.db_dir = db_dir
        self.db = adbapi.ConnectionPool('sqlite3', os.path.join(self.db_dir, "blockchainname.db"),
                                        check_same_thread=False)
        MetaDataStorage.__init__(self)

    def load(self):
        """Create the cache tables if they do not exist yet."""
        def create_tables(transaction):
            transaction.execute("CREATE TABLE IF NOT EXISTS name_metadata (" +
                                " name TEXT UNIQUE NOT NULL, " +
                                " txid TEXT NOT NULL, " +
                                " n INTEGER NOT NULL, " +
                                " sd_hash TEXT NOT NULL)")
            transaction.execute("create table if not exists claim_ids (" +
                                " claimId text, " +
                                " name text, " +
                                " txid text, " +
                                " n integer)")
            transaction.execute("CREATE TABLE IF NOT EXISTS claim_cache (" +
                                " row_id INTEGER PRIMARY KEY AUTOINCREMENT, " +
                                " claim_id TEXT UNIQUE NOT NULL, " +
                                " claim_sequence INTEGER, " +
                                " claim_address TEXT NOT NULL, " +
                                " height INTEGER NOT NULL, " +
                                " amount INTEGER NOT NULL, " +
                                " supports TEXT, " +
                                " claim_pb TEXT, " +
                                " channel_name TEXT, " +
                                " signature_is_valid BOOL, " +
                                " last_modified TEXT)")
            transaction.execute("CREATE TABLE IF NOT EXISTS uri_cache (" +
                                " row_id INTEGER PRIMARY KEY AUTOINCREMENT, " +
                                " uri TEXT UNIQUE NOT NULL, " +
                                " cache_row INTEGER, " +
                                " certificate_row INTEGER, " +
                                " last_modified TEXT)")

        return self.db.runInteraction(create_tables)

    @rerun_if_locked
    @defer.inlineCallbacks
    def save_name_metadata(self, name, claim_outpoint, sd_hash):
        # TODO: refactor the 'claim_ids' table to not be terrible
        txid, nout = claim_outpoint['txid'], claim_outpoint['nout']
        yield self.db.runOperation("INSERT OR REPLACE INTO name_metadata VALUES (?, ?, ?, ?)",
                                   (name, txid, nout, sd_hash))
        defer.returnValue(None)

    @rerun_if_locked
    @defer.inlineCallbacks
    def get_claim_metadata_for_sd_hash(self, sd_hash):
        """Return (name, txid, n) for a stream descriptor hash, or None."""
        result = yield self.db.runQuery("SELECT name, txid, n FROM name_metadata WHERE sd_hash=?",
                                        (sd_hash, ))
        response = None
        if result:
            response = result[0]
        defer.returnValue(response)

    @rerun_if_locked
    @defer.inlineCallbacks
    def update_claimid(self, claim_id, name, claim_outpoint):
        txid, nout = claim_outpoint['txid'], claim_outpoint['nout']
        yield self.db.runOperation("INSERT OR IGNORE INTO claim_ids VALUES (?, ?, ?, ?)",
                                   (claim_id, name, txid, nout))
        defer.returnValue(claim_id)

    @rerun_if_locked
    @defer.inlineCallbacks
    def get_claimid_for_tx(self, claim_outpoint):
        result = yield self.db.runQuery("SELECT claimId FROM claim_ids "
                                        "WHERE txid=? AND n=?",
                                        (claim_outpoint['txid'], claim_outpoint['nout']))
        response = None
        if result:
            response = result[0][0]
        defer.returnValue(response)

    @rerun_if_locked
    @defer.inlineCallbacks
    def _get_cached_claim(self, claim_id, check_expire=True):
        r = yield self.db.runQuery("SELECT * FROM claim_cache WHERE claim_id=?", (claim_id, ))
        claim_tx_info = yield self.db.runQuery("SELECT name, txid, n FROM claim_ids "
                                               "WHERE claimId=?", (claim_id, ))
        response = None
        if r and claim_tx_info:
            # claim_cache columns: row_id, claim_id, sequence, address, height,
            # amount, supports json, serialized pb hex, channel name, sig valid, ts
            _, _, seq, claim_address, height, amount, supports, raw, chan_name, valid, ts = r[0]
            last_modified = int(ts)
            name, txid, nout = claim_tx_info[0]
            claim = ClaimDict.deserialize(raw.decode('hex'))
            response = (claim, seq, claim_address, height, amount, supports,
                        chan_name, valid, last_modified, name, txid, nout)
        defer.returnValue(response)

    @rerun_if_locked
    @defer.inlineCallbacks
    def save_claim_to_cache(self, claim_id, claim_sequence, claim, claim_address, height, amount,
                            supports, channel_name, signature_is_valid):
        serialized = claim.serialized.encode("hex")
        # default missing supports to an empty list; the previous
        # `json.dumps([] or supports)` evaluated to `json.dumps(supports)`,
        # so a None came back as the string "null" and broke the
        # json.loads in CachedClaim.__init__ on the way out of the cache
        supports = json.dumps(supports or [])
        now = str(int(time.time()))

        yield self.db.runOperation("INSERT OR REPLACE INTO claim_cache(claim_sequence, "
                                   " claim_id, claim_address, height, "
                                   " amount, supports, claim_pb, "
                                   " channel_name, signature_is_valid, "
                                   " last_modified)"
                                   "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                                   (claim_sequence, claim_id, claim_address, height, amount,
                                    supports, serialized, channel_name, signature_is_valid, now))
        defer.returnValue(None)

    @rerun_if_locked
    @defer.inlineCallbacks
    def save_claim_to_uri_cache(self, uri, claim_id, certificate_id=None):
        result = yield self.db.runQuery("SELECT row_id, last_modified FROM claim_cache "
                                        "WHERE claim_id=?", (claim_id, ))
        certificate_result = None
        certificate_row = None

        if certificate_id:
            certificate_result = yield self.db.runQuery("SELECT row_id FROM claim_cache "
                                                        "WHERE claim_id=?", (certificate_id, ))
        # runQuery returns a (possibly empty) list, never None, so test
        # truthiness; the old `certificate_result is None` check could
        # never fire and the warning was unreachable
        if certificate_id is not None and not certificate_result:
            log.warning("Certificate is not in cache")
        elif certificate_result:
            certificate_row = certificate_result[0][0]

        if result:
            cache_row, ts = result[0]
            yield self.db.runOperation("INSERT OR REPLACE INTO uri_cache(uri, cache_row, "
                                       " certificate_row, last_modified) "
                                       "VALUES (?, ?, ?, ?)",
                                       (uri, cache_row, certificate_row,
                                        str(int(time.time()))))
        else:
            log.warning("Claim is not in cache")
        defer.returnValue(None)

    @rerun_if_locked
    @defer.inlineCallbacks
    def get_cached_claim_for_uri(self, uri, check_expire=True):
        """Resolve a uri from the cache; returns {"claim": ..., "certificate": ...} or None."""
        result = yield self.db.runQuery("SELECT "
                                        "claim.claim_id, cert.claim_id, uri_cache.last_modified "
                                        "FROM uri_cache "
                                        "INNER JOIN claim_cache as claim "
                                        "ON uri_cache.cache_row=claim.row_id "
                                        "LEFT OUTER JOIN claim_cache as cert "
                                        "ON uri_cache.certificate_row=cert.row_id "
                                        "WHERE uri_cache.uri=?", (uri, ))
        response = None
        if result:
            claim_id, certificate_id, last_modified = result[0]
            last_modified = int(last_modified)
            if check_expire and time.time() - last_modified > CLAIM_CACHE_TIME:
                defer.returnValue(None)
            claim = yield self.get_cached_claim(claim_id)
            if claim:
                response = {
                    "claim": claim
                }
            if response and certificate_id is not None:
                certificate = yield self.get_cached_claim(certificate_id)
                response['certificate'] = certificate
        defer.returnValue(response)
|
2015-09-22 19:06:20 +02:00
|
|
|
|
|
|
|
|
2016-09-27 20:18:35 +02:00
|
|
|
class Wallet(object):
|
|
|
|
"""This class implements the Wallet interface for the LBRYcrd payment system"""
|
|
|
|
implements(IWallet)
|
2015-08-20 17:27:15 +02:00
|
|
|
|
2016-12-01 06:28:25 +01:00
|
|
|
    def __init__(self, storage):
        """Initialize wallet bookkeeping on top of a MetaDataStorage backend.

        @param storage: MetaDataStorage instance used for claim/metadata caching
        @raise ValueError: if storage is not a MetaDataStorage
        """
        if not isinstance(storage, MetaDataStorage):
            raise ValueError('storage must be an instance of MetaDataStorage')
        self._storage = storage
        # handle for the next scheduled manage() pass (None while running/stopped)
        self.next_manage_call = None
        self.wallet_balance = Decimal(0.0)
        # total points currently held back by ReservedPoints reservations
        self.total_reserved_points = Decimal(0.0)
        self.peer_addresses = {}  # {Peer: string}
        self.queued_payments = defaultdict(Decimal)  # {address(string): amount(Decimal)}
        self.expected_balances = defaultdict(Decimal)  # {address(string): amount(Decimal)}
        self.current_address_given_to_peer = {}  # {Peer: address(string)}
        # (Peer, address(string), amount(Decimal), time(datetime), count(int),
        # incremental_amount(float))
        self.expected_balance_at_time = deque()
        self.max_expected_payment_time = datetime.timedelta(minutes=3)
        self.stopped = True

        self.manage_running = False
        # every _batch_count manage passes a "full" pass is forced
        self._manage_count = 0
        self._balance_refresh_time = 3
        self._batch_count = 20
|
2015-10-29 01:59:07 +01:00
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
def start(self):
|
|
|
|
def start_manage():
|
|
|
|
self.stopped = False
|
|
|
|
self.manage()
|
|
|
|
return True
|
|
|
|
|
2016-12-01 06:28:25 +01:00
|
|
|
d = self._storage.load()
|
2016-02-17 05:10:26 +01:00
|
|
|
d.addCallback(lambda _: self._start())
|
2015-08-20 17:27:15 +02:00
|
|
|
d.addCallback(lambda _: start_manage())
|
|
|
|
return d
|
|
|
|
|
2016-12-01 06:28:25 +01:00
|
|
|
    def _save_name_metadata(self, name, claim_outpoint, sd_hash):
        # persist the name <-> outpoint <-> sd_hash association via storage
        return self._storage.save_name_metadata(name, claim_outpoint, sd_hash)
|
|
|
|
|
|
|
|
    def _get_claim_metadata_for_sd_hash(self, sd_hash):
        # look up (name, txid, nout) for a stream descriptor hash via storage
        return self._storage.get_claim_metadata_for_sd_hash(sd_hash)
|
|
|
|
|
|
|
|
    def _update_claimid(self, claim_id, name, claim_outpoint):
        # record the claim id for a (name, outpoint) pair via storage
        return self._storage.update_claimid(claim_id, name, claim_outpoint)
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
    @staticmethod
    def log_stop_error(err):
        # errback used while stopping; log the full traceback and swallow
        log.error("An error occurred stopping the wallet: %s", err.getTraceback())
|
2016-02-17 05:10:26 +01:00
|
|
|
|
|
|
|
    def stop(self):
        """Stop the wallet: cancel the manage loop, flush one full pass, shut down."""
        log.info("Stopping %s", self)
        self.stopped = True
        # If self.next_manage_call is None, then manage is currently running or else
        # start has not been called, so set stopped and do nothing else.
        if self.next_manage_call is not None:
            self.next_manage_call.cancel()
            self.next_manage_call = None

        # one final full manage pass so queued payments and balances are flushed
        d = self.manage(do_full=True)
        d.addErrback(self.log_stop_error)
        d.addCallback(lambda _: self._stop())
        d.addErrback(self.log_stop_error)
        return d
|
|
|
|
|
2016-03-18 01:19:13 +01:00
|
|
|
    def manage(self, do_full=False):
        """Periodic bookkeeping pass: optionally check balances/send payments,
        then refresh the wallet balance and reschedule itself.

        @param do_full: force the expensive balance-check/payment step; also
            forced automatically every self._batch_count passes
        @return: Deferred firing when this pass completes
        """
        self.next_manage_call = None
        # mutable cell so inner closures can record that *this* call took
        # ownership of the manage_running flag
        have_set_manage_running = [False]
        self._manage_count += 1
        if self._manage_count % self._batch_count == 0:
            self._manage_count = 0
            do_full = True

        def check_if_manage_running():

            d = defer.Deferred()

            def fire_if_not_running():
                if self.manage_running is False:
                    self.manage_running = True
                    have_set_manage_running[0] = True
                    d.callback(True)
                elif do_full is False:
                    # a non-full pass can simply be skipped if one is running
                    d.callback(False)
                else:
                    # a full pass must run; poll until the current one finishes
                    task.deferLater(reactor, 1, fire_if_not_running)

            fire_if_not_running()
            return d

        d = check_if_manage_running()

        def do_manage():
            if do_full:
                d = self._check_expected_balances()
                d.addCallback(lambda _: self._send_payments())
            else:
                d = defer.succeed(True)

            def log_error(err):
                if isinstance(err, AttributeError):
                    log.warning("Failed to get an updated balance")
                    log.warning("Last balance update: %s", str(self.wallet_balance))

            d.addCallbacks(lambda _: self.update_balance(), log_error)
            return d

        d.addCallback(lambda should_run: do_manage() if should_run else None)

        def set_next_manage_call():
            if not self.stopped:
                self.next_manage_call = reactor.callLater(self._balance_refresh_time, self.manage)

        d.addCallback(lambda _: set_next_manage_call())

        def log_error(err):
            log.error("Something went wrong during manage. Error message: %s",
                      err.getErrorMessage())
            return err

        d.addErrback(log_error)

        def set_manage_not_running(arg):
            # only release the flag if this call was the one that set it
            if have_set_manage_running[0] is True:
                self.manage_running = False
            return arg

        d.addBoth(set_manage_not_running)
        return d
|
|
|
|
|
2017-01-12 18:51:44 +01:00
|
|
|
    @defer.inlineCallbacks
    def update_balance(self):
        """ obtain balance from lbryum wallet and set self.wallet_balance
        """
        balance = yield self._update_balance()
        # only log when the balance actually changed
        if self.wallet_balance != balance:
            log.debug("Got a new balance: %s", balance)
            self.wallet_balance = balance
|
|
|
|
|
2016-02-17 17:47:39 +01:00
|
|
|
    def get_info_exchanger(self):
        # request creator used to exchange payment addresses with peers
        return LBRYcrdAddressRequester(self)
|
|
|
|
|
|
|
|
    def get_wallet_info_query_handler_factory(self):
        # query handler factory that serves this wallet's payment address to peers
        return LBRYcrdAddressQueryHandlerFactory(self)
|
|
|
|
|
2015-09-22 18:08:17 +02:00
|
|
|
def reserve_points(self, identifier, amount):
|
2016-11-28 20:23:10 +01:00
|
|
|
"""Ensure a certain amount of points are available to be sent as
|
|
|
|
payment, before the service is rendered
|
2015-08-20 17:27:15 +02:00
|
|
|
|
2015-09-22 18:08:17 +02:00
|
|
|
@param identifier: The peer to which the payment will ultimately be sent
|
2015-08-20 17:27:15 +02:00
|
|
|
|
|
|
|
@param amount: The amount of points to reserve
|
|
|
|
|
2016-11-28 20:23:10 +01:00
|
|
|
@return: A ReservedPoints object which is given to send_points
|
|
|
|
once the service has been rendered
|
2015-08-20 17:27:15 +02:00
|
|
|
"""
|
|
|
|
rounded_amount = Decimal(str(round(amount, 8)))
|
2017-01-12 18:51:44 +01:00
|
|
|
if self.get_balance() >= rounded_amount:
|
2015-08-20 17:27:15 +02:00
|
|
|
self.total_reserved_points += rounded_amount
|
2015-09-22 18:08:17 +02:00
|
|
|
return ReservedPoints(identifier, rounded_amount)
|
2015-08-20 17:27:15 +02:00
|
|
|
return None
|
|
|
|
|
|
|
|
    def cancel_point_reservation(self, reserved_points):
        """
        Return all of the points that were reserved previously for some ReservedPoints object

        @param reserved_points: ReservedPoints previously returned by reserve_points

        @return: None
        """
        # release the full reservation back into the spendable balance
        self.total_reserved_points -= reserved_points.amount
|
|
|
|
|
|
|
|
    def send_points(self, reserved_points, amount):
        """
        Schedule a payment to be sent to a peer

        @param reserved_points: ReservedPoints object previously returned by reserve_points

        @param amount: amount of points to actually send, must be less than or equal to the
            amount reserved in reserved_points

        @return: Deferred which fires when the payment has been scheduled
        """
        rounded_amount = Decimal(str(round(amount, 8)))
        peer = reserved_points.identifier
        assert rounded_amount <= reserved_points.amount
        assert peer in self.peer_addresses
        # queue the payment; _send_payments will batch it into a transaction
        self.queued_payments[self.peer_addresses[peer]] += rounded_amount
        # make any unused points available
        self.total_reserved_points -= (reserved_points.amount - rounded_amount)
        log.debug("ordering that %s points be sent to %s", str(rounded_amount),
                  str(self.peer_addresses[peer]))
        peer.update_stats('points_sent', amount)
        return defer.succeed(True)
|
|
|
|
|
2015-09-22 18:08:17 +02:00
|
|
|
def send_points_to_address(self, reserved_points, amount):
|
|
|
|
"""
|
|
|
|
Schedule a payment to be sent to an address
|
|
|
|
|
|
|
|
@param reserved_points: ReservedPoints object previously returned by reserve_points
|
|
|
|
|
|
|
|
@param amount: amount of points to actually send. must be less than or equal to the
|
|
|
|
amount reselved in reserved_points
|
|
|
|
|
|
|
|
@return: Deferred which fires when the payment has been scheduled
|
|
|
|
"""
|
|
|
|
rounded_amount = Decimal(str(round(amount, 8)))
|
|
|
|
address = reserved_points.identifier
|
2017-01-25 17:44:21 +01:00
|
|
|
assert rounded_amount <= reserved_points.amount
|
2015-09-22 18:08:17 +02:00
|
|
|
self.queued_payments[address] += rounded_amount
|
|
|
|
self.total_reserved_points -= (reserved_points.amount - rounded_amount)
|
2016-11-23 22:27:23 +01:00
|
|
|
log.debug("Ordering that %s points be sent to %s", str(rounded_amount),
|
2017-01-25 17:44:21 +01:00
|
|
|
str(address))
|
2015-09-22 18:08:17 +02:00
|
|
|
return defer.succeed(True)
|
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
    def add_expected_payment(self, peer, amount):
        """Increase the number of points expected to be paid by a peer"""
        rounded_amount = Decimal(str(round(amount, 8)))
        assert peer in self.current_address_given_to_peer
        address = self.current_address_given_to_peer[peer]
        log.debug("expecting a payment at address %s in the amount of %s",
                  str(address), str(rounded_amount))
        self.expected_balances[address] += rounded_amount
        expected_balance = self.expected_balances[address]
        expected_time = datetime.datetime.now() + self.max_expected_payment_time
        # queue entry: (peer, address, total expected balance at address,
        # deadline, retry count, incremental amount of this payment)
        self.expected_balance_at_time.append(
            (peer, address, expected_balance, expected_time, 0, amount))
        peer.update_stats('expected_points', amount)
|
|
|
|
|
|
|
|
    def update_peer_address(self, peer, address):
        # remember which address this peer should be paid at
        self.peer_addresses[peer] = address
|
|
|
|
|
2017-03-27 18:41:53 +02:00
|
|
|
def get_unused_address_for_peer(self, peer):
|
2015-08-20 17:27:15 +02:00
|
|
|
def set_address_for_peer(address):
|
|
|
|
self.current_address_given_to_peer[peer] = address
|
|
|
|
return address
|
2017-01-25 17:44:21 +01:00
|
|
|
|
2017-03-27 18:41:53 +02:00
|
|
|
d = self.get_unused_address()
|
2015-08-20 17:27:15 +02:00
|
|
|
d.addCallback(set_address_for_peer)
|
|
|
|
return d
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
    def _send_payments(self):
        """Drain the queued payments and send them as one many-output transaction."""
        payments_to_send = {}
        # .items() returns a list here (py2), so deleting entries while
        # looping over it is safe
        for address, points in self.queued_payments.items():
            if points > 0:
                log.debug("Should be sending %s points to %s", str(points), str(address))
                payments_to_send[address] = points
                self.total_reserved_points -= points
            else:
                # zero/negative amounts are dropped without sending
                log.info("Skipping dust")

            del self.queued_payments[address]

        if payments_to_send:
            log.debug("Creating a transaction with outputs %s", str(payments_to_send))
            d = self._do_send_many(payments_to_send)
            d.addCallback(lambda txid: log.debug("Sent transaction %s", txid))
            return d

        log.debug("There were no payments to send")
        return defer.succeed(True)
|
|
|
|
|
2017-04-23 19:33:06 +02:00
|
|
|
######
|
2017-01-25 17:44:21 +01:00
|
|
|
|
2017-04-09 22:10:07 +02:00
|
|
|
    @defer.inlineCallbacks
    def get_cached_claim(self, claim_id, check_expire=True):
        # delegate to the storage backend; yields a response dict or None
        results = yield self._storage.get_cached_claim(claim_id, check_expire)
        defer.returnValue(results)
|
2016-08-05 00:44:12 +02:00
|
|
|
|
2017-04-24 00:16:03 +02:00
|
|
|
    @defer.inlineCallbacks
    def get_claim(self, claim_id, check_expire=True):
        """Return the claim for claim_id, preferring the cache, or None."""
        cached_claim = yield self.get_cached_claim(claim_id, check_expire)
        if cached_claim:
            result = cached_claim
        else:
            # cache miss or expired entry: fetch from the network
            log.debug("Refreshing cached claim: %s", claim_id)
            claim = yield self._get_claim_by_claimid(claim_id)
            result = None
            if claim:
                result = yield self._handle_claim_result(claim)
            else:
                log.warning("Claim does not exist: %s", claim_id)
        defer.returnValue(result)
|
2017-01-25 17:44:21 +01:00
|
|
|
|
2017-04-24 00:16:03 +02:00
|
|
|
    @defer.inlineCallbacks
    def get_claimid(self, txid, nout):
        # resolve a txid:nout outpoint to its claim id via storage
        claim_outpoint = ClaimOutpoint(txid, nout)
        claim_id = yield self._storage.get_claimid_for_tx(claim_outpoint)
        defer.returnValue(claim_id)
|
2016-07-28 20:55:17 +02:00
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
@defer.inlineCallbacks
|
2016-09-16 02:15:20 +02:00
|
|
|
def get_my_claim(self, name):
|
2017-04-07 02:42:07 +02:00
|
|
|
my_claims = yield self.get_name_claims()
|
|
|
|
my_claim = False
|
|
|
|
for claim in my_claims:
|
|
|
|
if claim['name'] == name:
|
|
|
|
claim['value'] = ClaimDict.load_dict(claim['value'])
|
|
|
|
my_claim = claim
|
|
|
|
break
|
|
|
|
defer.returnValue(my_claim)
|
2016-09-16 02:15:20 +02:00
|
|
|
|
2017-04-09 22:10:07 +02:00
|
|
|
@defer.inlineCallbacks
|
2017-04-24 00:16:03 +02:00
|
|
|
def get_claim_info(self, name, txid=None, nout=None, claim_id=None, check_expire=True):
|
2017-04-09 22:10:07 +02:00
|
|
|
if claim_id is not None:
|
2017-04-24 00:16:03 +02:00
|
|
|
results = yield self.get_claim(claim_id, check_expire)
|
2017-04-09 22:10:07 +02:00
|
|
|
if results['name'] != name:
|
|
|
|
raise Exception("Name does not match claim referenced by id")
|
|
|
|
elif txid is None or nout is None:
|
|
|
|
results = yield self.get_claim_by_name(name)
|
2016-11-28 20:23:10 +01:00
|
|
|
else:
|
2017-04-24 00:16:03 +02:00
|
|
|
results = yield self.get_claim_by_outpoint(ClaimOutpoint(txid, nout), check_expire)
|
2017-04-09 22:10:07 +02:00
|
|
|
defer.returnValue(results)
|
2016-08-08 07:46:19 +02:00
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
    @defer.inlineCallbacks
    def _handle_claim_result(self, results, update_caches=True):
        """Normalize a raw claim/resolve result and optionally refresh local caches.

        Handles three result shapes: a 'certificate' entry, a 'claim' entry,
        and a flat result carrying 'value' directly. In each case the encoded
        claim value is decoded (adding 'hex' with the serialized form) and,
        when update_caches is True, name metadata, the claim-id mapping and
        the claim cache are updated.

        Raises UnknownNameError when there is no result or the name is
        unclaimed/not found; re-raises any other reported error.
        """
        if not results:
            raise UnknownNameError("No results to return")

        if 'error' in results:
            # These two error strings are how the server reports a missing name.
            if results['error'] in ['name is not claimed', 'claim not found']:
                raise UnknownNameError(results['error'])
            else:
                raise Exception(results['error'])

        if 'certificate' in results:
            try:
                decoded = smart_decode(results['certificate']['value'])
                claim_dict = decoded.claim_dict
                outpoint = ClaimOutpoint(results['certificate']['txid'],
                                         results['certificate']['nout'])
                name = results['certificate']['name']
                results['certificate']['value'] = claim_dict
                results['certificate']['hex'] = decoded.serialized.encode('hex')
                if update_caches:
                    # Only stream claims carry a source hash worth persisting.
                    if decoded.is_stream:
                        yield self._save_name_metadata(name, outpoint, decoded.source_hash)
                    yield self._update_claimid(results['certificate']['claim_id'], name, outpoint)
                    # Certificates have no channel name / signature validity of
                    # their own, hence the trailing Nones.
                    yield self._storage.save_claim_to_cache(results['certificate']['claim_id'],
                                                            results['certificate']['claim_sequence'],
                                                            decoded, results['certificate']['address'],
                                                            results['certificate']['height'],
                                                            results['certificate']['amount'],
                                                            results['certificate']['supports'],
                                                            None,
                                                            None)
            except DecodeError:
                # An undecodable certificate is silently left as-is.
                pass

        if 'claim' in results:
            claim = results['claim']
            if 'has_signature' in claim and claim['has_signature']:
                if not claim['signature_is_valid']:
                    log.warning("lbry://%s#%s has an invalid signature",
                                claim['name'], claim['claim_id'])
            try:
                decoded = smart_decode(claim['value'])
                claim_dict = decoded.claim_dict
                outpoint = ClaimOutpoint(claim['txid'], claim['nout'])
                name = claim['name']
                claim['value'] = claim_dict
                claim['hex'] = decoded.serialized.encode('hex')
                if update_caches:
                    if decoded.is_stream:
                        yield self._save_name_metadata(name, outpoint, decoded.source_hash)
                    yield self._update_claimid(claim['claim_id'], name, outpoint)
                    yield self._storage.save_claim_to_cache(claim['claim_id'],
                                                            claim['claim_sequence'],
                                                            decoded, claim['address'],
                                                            claim['height'],
                                                            claim['amount'], claim['supports'],
                                                            claim.get('channel_name', None),
                                                            claim.get('signature_is_valid', None))
            except DecodeError:
                # Keep the undecodable raw value under 'hex' and flag the error.
                claim['hex'] = claim['value']
                claim['value'] = None
                claim['error'] = "Failed to decode value"

            results['claim'] = claim

        elif 'value' in results:
            # Flat result shape: the claim fields live directly in `results`.
            if 'has_signature' in results and results['has_signature']:
                if not results['signature_is_valid']:
                    log.warning("lbry://%s#%s has an invalid signature",
                                results['name'], results['claim_id'])
            try:
                decoded = ClaimDict.load_dict(results['value'])
                claim_dict = decoded.claim_dict
                claim_hex = decoded.serialized.encode('hex')
                claim_err = None
                outpoint = ClaimOutpoint(results['txid'], results['nout'])
                name = results['name']
                if update_caches:
                    if decoded.is_stream:
                        yield self._save_name_metadata(name, outpoint, decoded.source_hash)
                    yield self._update_claimid(results['claim_id'], name, outpoint)
                    yield self._storage.save_claim_to_cache(results['claim_id'],
                                                            results.get('claim_sequence', None),
                                                            decoded, results['address'],
                                                            results['height'], results['amount'],
                                                            results.get('supports', '[]'),
                                                            results.get('channel_name', None),
                                                            results.get('signature_is_valid',
                                                                        None))

            except DecodeError:
                claim_dict = None
                claim_hex = results['value']
                claim_err = "Failed to decode value"
            if claim_err:
                results['error'] = claim_err
            results['hex'] = claim_hex
            results['value'] = claim_dict

        defer.returnValue(results)
|
2016-08-08 07:46:19 +02:00
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
    @defer.inlineCallbacks
    def resolve_uri(self, uri, check_cache=True):
        """Resolve a lbry:// URI, using the per-URI claim cache when allowed.

        On a cache hit the cached record is re-normalized without touching
        the claim caches (update_caches=False below); on a miss the URI is
        resolved over the network and the uri->claim mapping is saved.
        """
        cached_claim = None
        if check_cache:
            cached_claim = yield self._storage.get_cached_claim_for_uri(uri, check_cache)
        if cached_claim:
            log.debug("Using cached results for %s", uri)
            resolve_results = cached_claim
        else:
            log.info("Resolving %s", uri)
            resolve_results = yield self._get_value_for_uri(uri)

        # Remember the ids before _handle_claim_result mutates the dict.
        claim_id = None
        if resolve_results and 'claim' in resolve_results:
            claim_id = resolve_results['claim']['claim_id']
        certificate_id = None
        if resolve_results and 'certificate' in resolve_results:
            certificate_id = resolve_results['certificate']['claim_id']

        # Only refresh the caches when the data came from the network.
        result = yield self._handle_claim_result(resolve_results, cached_claim is None)
        if claim_id:
            yield self._storage.save_claim_to_uri_cache(uri, claim_id, certificate_id)

        defer.returnValue(result)
|
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
2017-04-24 00:16:03 +02:00
|
|
|
def get_claim_by_outpoint(self, claim_outpoint, check_expire=True):
|
|
|
|
claim_id = yield self._storage.get_claimid_for_tx(claim_outpoint)
|
|
|
|
txid, nout = claim_outpoint['txid'], claim_outpoint['nout']
|
|
|
|
if claim_id:
|
|
|
|
cached_claim = yield self._storage.get_cached_claim(claim_id, check_expire)
|
|
|
|
else:
|
|
|
|
cached_claim = None
|
|
|
|
if not cached_claim:
|
|
|
|
claim = yield self._get_claim_by_outpoint(txid, nout)
|
|
|
|
result = yield self._handle_claim_result(claim)
|
|
|
|
else:
|
|
|
|
result = cached_claim
|
2017-04-07 02:42:07 +02:00
|
|
|
defer.returnValue(result)
|
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_claim_by_name(self, name):
|
|
|
|
get_name_result = yield self._get_value_for_name(name)
|
|
|
|
result = yield self._handle_claim_result(get_name_result)
|
|
|
|
defer.returnValue(result)
|
2016-08-08 07:46:19 +02:00
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
@defer.inlineCallbacks
|
2016-08-08 07:42:37 +02:00
|
|
|
def get_claims_for_name(self, name):
|
2017-04-07 02:42:07 +02:00
|
|
|
result = yield self._get_claims_for_name(name)
|
|
|
|
claims = result['claims']
|
|
|
|
claims_for_return = []
|
|
|
|
for claim in claims:
|
2017-04-09 22:10:07 +02:00
|
|
|
try:
|
|
|
|
decoded = smart_decode(claim['value'])
|
|
|
|
claim['value'] = decoded.claim_dict
|
|
|
|
claim['hex'] = decoded.serialized.encode('hex')
|
|
|
|
claims_for_return.append(claim)
|
|
|
|
except DecodeError:
|
|
|
|
claim['hex'] = claim['value']
|
|
|
|
claim['value'] = None
|
|
|
|
claim['error'] = "Failed to decode"
|
2017-04-23 19:33:06 +02:00
|
|
|
log.warning("Failed to decode claim value for lbry://%s#%s", claim['name'],
|
2017-04-09 22:10:07 +02:00
|
|
|
claim['claim_id'])
|
|
|
|
claims_for_return.append(claim)
|
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
result['claims'] = claims_for_return
|
|
|
|
defer.returnValue(result)
|
2016-07-28 20:55:17 +02:00
|
|
|
|
2017-01-16 20:05:16 +01:00
|
|
|
def _process_claim_out(self, claim_out):
|
|
|
|
claim_out.pop('success')
|
|
|
|
claim_out['fee'] = float(claim_out['fee'])
|
|
|
|
return claim_out
|
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
def claim_new_channel(self, channel_name, amount):
|
|
|
|
parsed_channel_name = parse_lbry_uri(channel_name)
|
|
|
|
if not parsed_channel_name.is_channel:
|
|
|
|
raise Exception("Invalid channel name")
|
|
|
|
elif (parsed_channel_name.path or parsed_channel_name.claim_id or
|
|
|
|
parsed_channel_name.bid_position or parsed_channel_name.claim_sequence):
|
2017-04-23 19:33:06 +02:00
|
|
|
raise Exception("New channel claim should have no fields other than name")
|
2017-04-10 17:01:28 +02:00
|
|
|
log.info("Preparing to make certificate claim for %s", channel_name)
|
2017-04-07 02:42:07 +02:00
|
|
|
return self._claim_certificate(parsed_channel_name.name, amount)
|
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def channel_list(self):
|
|
|
|
certificates = yield self._get_certificate_claims()
|
|
|
|
results = []
|
|
|
|
for claim in certificates:
|
|
|
|
formatted = yield self._handle_claim_result(claim)
|
|
|
|
results.append(formatted)
|
|
|
|
defer.returnValue(results)
|
|
|
|
|
2017-02-09 04:41:15 +01:00
|
|
|
@defer.inlineCallbacks
|
2017-06-03 02:26:03 +02:00
|
|
|
def claim_name(self, name, bid, metadata, certificate_id=None, claim_address=None):
|
2017-02-09 04:41:15 +01:00
|
|
|
"""
|
|
|
|
Claim a name, or update if name already claimed by user
|
2015-10-28 06:38:01 +01:00
|
|
|
|
2017-02-09 04:41:15 +01:00
|
|
|
@param name: str, name to claim
|
|
|
|
@param bid: float, bid amount
|
2017-04-07 02:42:07 +02:00
|
|
|
@param metadata: ClaimDict compliant dict
|
|
|
|
@param certificate_id: str (optional), claim id of channel certificate
|
2017-06-03 02:26:03 +02:00
|
|
|
@param claim_address: str (optional), address to send claim to
|
2017-02-09 16:11:46 +01:00
|
|
|
|
2017-02-09 04:41:15 +01:00
|
|
|
@return: Deferred which returns a dict containing below items
|
|
|
|
txid - txid of the resulting transaction
|
|
|
|
nout - nout of the resulting claim
|
|
|
|
fee - transaction fee paid to make claim
|
|
|
|
claim_id - claim id of the claim
|
|
|
|
"""
|
2017-04-07 02:42:07 +02:00
|
|
|
|
|
|
|
decoded = ClaimDict.load_dict(metadata)
|
|
|
|
serialized = decoded.serialized
|
|
|
|
|
|
|
|
if self.get_balance() < Decimal(bid):
|
|
|
|
raise InsufficientFundsError()
|
|
|
|
|
2017-06-03 02:26:03 +02:00
|
|
|
claim = yield self._send_name_claim(name, serialized.encode('hex'),
|
|
|
|
bid, certificate_id, claim_address)
|
2017-02-09 04:41:15 +01:00
|
|
|
|
|
|
|
if not claim['success']:
|
|
|
|
msg = 'Claim to name {} failed: {}'.format(name, claim['reason'])
|
|
|
|
raise Exception(msg)
|
|
|
|
|
|
|
|
claim = self._process_claim_out(claim)
|
|
|
|
claim_outpoint = ClaimOutpoint(claim['txid'], claim['nout'])
|
2017-04-23 19:33:06 +02:00
|
|
|
log.info("Saving metadata for claim %s %d", claim['txid'], claim['nout'])
|
2017-04-07 02:42:07 +02:00
|
|
|
yield self._update_claimid(claim['claim_id'], name, claim_outpoint)
|
|
|
|
yield self._save_name_metadata(name, claim_outpoint, decoded.source_hash)
|
2017-02-09 04:41:15 +01:00
|
|
|
defer.returnValue(claim)
|
2015-10-28 06:38:01 +01:00
|
|
|
|
2017-02-13 20:17:53 +01:00
|
|
|
@defer.inlineCallbacks
|
2017-04-07 02:42:07 +02:00
|
|
|
def abandon_claim(self, claim_id):
|
|
|
|
claim_out = yield self._abandon_claim(claim_id)
|
2016-12-02 18:57:18 +01:00
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
if not claim_out['success']:
|
2017-04-23 19:33:06 +02:00
|
|
|
msg = 'Abandon of {} failed: {}'.format(claim_id, claim_out['reason'])
|
2017-04-07 02:42:07 +02:00
|
|
|
raise Exception(msg)
|
|
|
|
|
|
|
|
claim_out = self._process_claim_out(claim_out)
|
|
|
|
defer.returnValue(claim_out)
|
2016-01-26 02:28:05 +01:00
|
|
|
|
2016-08-08 08:32:56 +02:00
|
|
|
    def support_claim(self, name, claim_id, amount):
        """Send a support of `amount` to the claim `name`:`claim_id`.

        Raises InsufficientFundsError synchronously when the balance is too
        low; otherwise returns a Deferred firing with the processed claim
        output (or failing when the support command reports failure).
        """
        def _parse_support_claim_out(claim_out):
            # Validate and normalize the output of the 'support' command.
            if not claim_out['success']:
                msg = 'Support of {}:{} failed: {}'.format(name, claim_id, claim_out['reason'])
                raise Exception(msg)
            claim_out = self._process_claim_out(claim_out)
            return defer.succeed(claim_out)

        # NOTE: this check raises synchronously, before any Deferred exists,
        # unlike claim_name/abandon_claim which fail the returned Deferred.
        if self.get_balance() < amount:
            raise InsufficientFundsError()

        d = self._support_claim(name, claim_id, amount)
        d.addCallback(lambda claim_out: _parse_support_claim_out(claim_out))
        return d
|
2016-08-08 08:32:56 +02:00
|
|
|
|
2016-09-09 06:48:54 +02:00
|
|
|
def get_block_info(self, height):
|
|
|
|
d = self._get_blockhash(height)
|
|
|
|
return d
|
|
|
|
|
2016-08-19 04:15:49 +02:00
|
|
|
def get_history(self):
|
|
|
|
d = self._get_history()
|
|
|
|
return d
|
|
|
|
|
2016-09-02 07:27:30 +02:00
|
|
|
def address_is_mine(self, address):
|
|
|
|
d = self._address_is_mine(address)
|
|
|
|
return d
|
|
|
|
|
2016-10-14 08:13:37 +02:00
|
|
|
def get_transaction(self, txid):
|
|
|
|
d = self._get_transaction(txid)
|
2016-08-23 00:43:52 +02:00
|
|
|
return d
|
|
|
|
|
2016-10-03 22:37:27 +02:00
|
|
|
def get_claim_metadata_for_sd_hash(self, sd_hash):
|
|
|
|
return self._get_claim_metadata_for_sd_hash(sd_hash)
|
|
|
|
|
2017-04-23 19:33:06 +02:00
|
|
|
def get_balance(self):
|
|
|
|
return self.wallet_balance - self.total_reserved_points - sum(self.queued_payments.values())
|
2017-04-21 03:59:40 +02:00
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
def _check_expected_balances(self):
|
|
|
|
now = datetime.datetime.now()
|
|
|
|
balances_to_check = []
|
|
|
|
try:
|
|
|
|
while self.expected_balance_at_time[0][3] < now:
|
2017-04-23 19:33:06 +02:00
|
|
|
balances_to_check.append(self.expected_balance_at_time.popleft())
|
2016-02-17 05:10:26 +01:00
|
|
|
except IndexError:
|
|
|
|
pass
|
|
|
|
ds = []
|
|
|
|
for balance_to_check in balances_to_check:
|
2017-04-23 19:33:06 +02:00
|
|
|
log.debug("Checking balance of address %s", str(balance_to_check[1]))
|
2016-02-17 05:10:26 +01:00
|
|
|
d = self._get_balance_for_address(balance_to_check[1])
|
|
|
|
d.addCallback(lambda bal: bal >= balance_to_check[2])
|
|
|
|
ds.append(d)
|
|
|
|
dl = defer.DeferredList(ds)
|
2016-01-26 02:28:05 +01:00
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
def handle_checks(results):
|
|
|
|
from future_builtins import zip
|
|
|
|
for balance, (success, result) in zip(balances_to_check, results):
|
|
|
|
peer = balance[0]
|
|
|
|
if success is True:
|
|
|
|
if result is False:
|
|
|
|
if balance[4] <= 1: # first or second strike, give them another chance
|
2017-01-25 23:00:21 +01:00
|
|
|
new_expected_balance = (
|
|
|
|
balance[0],
|
|
|
|
balance[1],
|
|
|
|
balance[2],
|
2017-04-23 19:33:06 +02:00
|
|
|
datetime.datetime.now() + self.max_expected_payment_time,
|
2017-01-25 23:00:21 +01:00
|
|
|
balance[4] + 1,
|
|
|
|
balance[5]
|
|
|
|
)
|
2017-04-23 19:33:06 +02:00
|
|
|
self.expected_balance_at_time.append(new_expected_balance)
|
2016-02-17 05:10:26 +01:00
|
|
|
peer.update_score(-5.0)
|
|
|
|
else:
|
|
|
|
peer.update_score(-50.0)
|
|
|
|
else:
|
|
|
|
if balance[4] == 0:
|
|
|
|
peer.update_score(balance[5])
|
|
|
|
peer.update_stats('points_received', balance[5])
|
|
|
|
else:
|
|
|
|
log.warning("Something went wrong checking a balance. Peer: %s, account: %s,"
|
|
|
|
"expected balance: %s, expected time: %s, count: %s, error: %s",
|
2017-04-23 19:33:06 +02:00
|
|
|
str(balance[0]), str(balance[1]), str(balance[2]), str(balance[3]),
|
2016-02-17 05:10:26 +01:00
|
|
|
str(balance[4]), str(result.getErrorMessage()))
|
2016-01-26 02:28:05 +01:00
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
dl.addCallback(handle_checks)
|
|
|
|
return dl
|
2016-01-26 02:28:05 +01:00
|
|
|
|
2017-01-25 17:44:21 +01:00
|
|
|
# ======== Must be overridden ======== #
|
2016-02-17 05:10:26 +01:00
|
|
|
|
2017-01-12 18:51:44 +01:00
|
|
|
    def _update_balance(self):
        """Fetch the wallet balance from the backend. Must be overridden."""
        return defer.fail(NotImplementedError())
|
2015-08-20 17:27:15 +02:00
|
|
|
|
|
|
|
    def get_new_address(self):
        """Create and return a brand new address. Must be overridden."""
        return defer.fail(NotImplementedError())
|
2015-08-20 17:27:15 +02:00
|
|
|
|
2017-04-23 19:33:06 +02:00
|
|
|
    def get_address_balance(self, address):
        """Return the balance held at `address`. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
    def get_block(self, blockhash):
        """Return the block with hash `blockhash`. Must be overridden."""
        return defer.fail(NotImplementedError())
|
2015-12-04 02:56:56 +01:00
|
|
|
|
|
|
|
    def get_most_recent_blocktime(self):
        """Return the timestamp of the most recent block. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2016-02-19 06:44:08 +01:00
|
|
|
    def get_best_blockhash(self):
        """Return the hash of the best known block. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
|
|
|
    def get_name_claims(self):
        """Return the wallet's own name claims. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2016-08-08 07:42:37 +02:00
|
|
|
    def _get_claims_for_name(self, name):
        """Return all claims made for `name`. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
    def _claim_certificate(self, name, amount):
        """Make a channel certificate claim. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2017-06-03 02:26:03 +02:00
|
|
|
    def _send_name_claim(self, name, val, amount, certificate_id=None, claim_address=None):
        """Broadcast a name claim transaction. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
    def _abandon_claim(self, claim_id):
        """Abandon the claim with `claim_id`. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2016-08-08 08:32:56 +02:00
|
|
|
    def _support_claim(self, name, claim_id, amount):
        """Send a support transaction for a claim. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
    def _do_send_many(self, payments_to_send):
        """Send multiple queued payments in one transaction. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
|
|
|
    def _get_value_for_name(self, name):
        """Resolve the winning claim value for `name`. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2016-02-17 17:47:39 +01:00
|
|
|
    def get_claims_from_tx(self, txid):
        """Return the claims contained in transaction `txid`. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
|
|
|
    def _get_balance_for_address(self, address):
        """Return the balance at `address`. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2016-08-19 04:15:49 +02:00
|
|
|
    def _get_history(self):
        """Return the wallet's transaction history. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2016-09-02 07:27:30 +02:00
|
|
|
    def _address_is_mine(self, address):
        """Return whether `address` belongs to this wallet. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2017-04-07 02:42:07 +02:00
|
|
|
    def _get_value_for_uri(self, uri):
        """Resolve a lbry:// URI to its raw claim data. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
|
|
|
    def _get_certificate_claims(self):
        """Return the wallet's certificate claims. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
|
|
|
    def _get_claim_by_outpoint(self, txid, nout):
        """Fetch the claim at outpoint (txid, nout). Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
|
|
|
    def _get_claim_by_claimid(self, claim_id):
        """Fetch the claim with `claim_id`. Must be overridden."""
        return defer.fail(NotImplementedError())
|
|
|
|
|
2016-02-19 06:44:08 +01:00
|
|
|
    def _start(self):
        """Backend-specific startup hook; default is a no-op."""
        pass
|
|
|
|
|
|
|
|
    def _stop(self):
        """Backend-specific shutdown hook; default is a no-op."""
        pass
|
|
|
|
|
2016-02-17 05:10:26 +01:00
|
|
|
|
2016-09-27 20:18:35 +02:00
|
|
|
class LBRYumWallet(Wallet):
|
2016-12-01 06:28:25 +01:00
|
|
|
    def __init__(self, storage, config=None):
        """Wallet backed by the lbryum (electrum-style) client.

        `storage` is the lbrynet storage layer; `config` is an optional
        raw config dict passed through make_config.
        """
        Wallet.__init__(self, storage)
        self._config = config
        self.config = make_config(self._config)
        # lbryum Network and Wallet objects, created in _start/_load_wallet.
        self.network = None
        self.wallet = None
        # True when _load_wallet had to create a brand new wallet file.
        self.is_first_run = False
        # Guards the one-time "first run" log message in check_started.
        self.printed_retrieving_headers = False
        # LoopingCall handles for startup / catch-up polling.
        self._start_check = None
        self._catch_up_check = None
        self._caught_up_counter = 0
        self._lag_counter = 0
        # Header-sync progress tracking.
        self.blocks_behind = 0
        self.catchup_progress = 0
|
2016-02-19 06:44:08 +01:00
|
|
|
|
2017-01-28 17:06:29 +01:00
|
|
|
def _is_first_run(self):
|
|
|
|
return (not self.printed_retrieving_headers and
|
|
|
|
self.network.blockchain.retrieving_headers)
|
|
|
|
|
2016-02-19 06:44:08 +01:00
|
|
|
    def _start(self):
        """Start the lbryum network/wallet and wait for full synchronization.

        Sequence: create the Network, load (or create) the wallet file,
        poll until the network connection settles, then load the
        blockchain headers and wait for address subscriptions to sync.
        Returns a Deferred that fires when the wallet is ready.
        """
        network_start_d = defer.Deferred()

        def setup_network():
            self.network = Network(self.config)
            log.info("Loading the wallet")
            return defer.succeed(self.network.start())

        def check_started():
            # Polled by a LoopingCall until the network stops connecting;
            # then fires network_start_d with the outcome.
            if self.network.is_connecting():
                if self._is_first_run():
                    log.info("Running the wallet for the first time. This may take a moment.")
                    self.printed_retrieving_headers = True
                return False
            self._start_check.stop()
            self._start_check = None
            if self.network.is_connected():
                network_start_d.callback(True)
            else:
                network_start_d.errback(ValueError("Failed to connect to network."))

        self._start_check = task.LoopingCall(check_started)

        d = setup_network()
        d.addCallback(lambda _: self._load_wallet())
        d.addCallback(self._save_wallet)
        d.addCallback(lambda _: self._start_check.start(.1))
        d.addCallback(lambda _: network_start_d)
        d.addCallback(lambda _: self._load_blockchain())
        d.addCallback(lambda _: log.info("Subscribing to addresses"))
        d.addCallback(lambda _: self.wallet.wait_until_synchronized(lambda _: None))
        d.addCallback(lambda _: log.info("Synchronized wallet"))
        return d
|
|
|
|
|
|
|
|
    def _stop(self):
        """Stop wallet threads and the network connection.

        Cancels the startup/catch-up polls, asks the network to stop, and
        returns a Deferred that fires once the network reports it is
        disconnected (polled every 100ms).
        """
        if self._start_check is not None:
            self._start_check.stop()
            self._start_check = None

        if self._catch_up_check is not None:
            if self._catch_up_check.running:
                self._catch_up_check.stop()
            self._catch_up_check = None

        d = defer.Deferred()

        def check_stopped():
            # Keep polling while the network is still connected; once it is
            # down, stop this LoopingCall and fire the shutdown Deferred.
            if self.network:
                if self.network.is_connected():
                    return False
            stop_check.stop()
            self.network = None
            d.callback(True)

        if self.wallet:
            self.wallet.stop_threads()
            log.info("Stopped wallet")
        if self.network:
            self.network.stop()
            log.info("Stopped connection to lbryum server")

        stop_check = task.LoopingCall(check_stopped)
        stop_check.start(.1)
        return d
|
|
|
|
|
|
|
|
def _load_wallet(self):
|
2016-12-19 21:24:14 +01:00
|
|
|
path = self.config.get_wallet_path()
|
|
|
|
storage = lbryum.wallet.WalletStorage(path)
|
|
|
|
wallet = lbryum.wallet.Wallet(storage)
|
|
|
|
if not storage.file_exists:
|
2017-01-03 20:13:01 +01:00
|
|
|
self.is_first_run = True
|
2016-12-19 21:24:14 +01:00
|
|
|
seed = wallet.make_seed()
|
|
|
|
wallet.add_seed(seed, None)
|
|
|
|
wallet.create_master_keys(None)
|
|
|
|
wallet.create_main_account()
|
|
|
|
wallet.synchronize()
|
|
|
|
self.wallet = wallet
|
2017-01-24 00:55:20 +01:00
|
|
|
self._check_large_wallet()
|
2016-12-19 21:24:14 +01:00
|
|
|
return defer.succeed(True)
|
2016-02-19 06:44:08 +01:00
|
|
|
|
2017-01-24 00:55:20 +01:00
|
|
|
def _check_large_wallet(self):
|
|
|
|
if len(self.wallet.addresses(include_change=False)) > 1000:
|
2017-04-23 19:33:06 +02:00
|
|
|
log.warning(("Your wallet is excessively large, please follow instructions here: ",
|
|
|
|
"https://github.com/lbryio/lbry/issues/437 to reduce your wallet size"))
|
2017-01-24 00:55:20 +01:00
|
|
|
|
2016-12-19 21:24:14 +01:00
|
|
|
    def _load_blockchain(self):
        """Start wallet threads and wait until local headers catch up to the server.

        Registers an 'updated' callback with the lbryum network that tracks
        blocks_behind and fires the returned Deferred once local and remote
        heights match.
        """
        blockchain_caught_d = defer.Deferred()

        def on_update_callback(event, *args):
            # This callback is called by lbryum when something chain
            # related has happened
            local_height = self.network.get_local_height()
            remote_height = self.network.get_server_height()
            updated_blocks_behind = self.network.get_blocks_behind()
            log.info(
                'Local Height: %s, remote height: %s, behind: %s',
                local_height, remote_height, updated_blocks_behind)

            self.blocks_behind = updated_blocks_behind
            if local_height != remote_height:
                return

            assert self.blocks_behind == 0
            self.network.unregister_callback(on_update_callback)
            log.info("Wallet Loaded")
            # The callback runs on a lbryum thread; hop back to the reactor.
            reactor.callFromThread(blockchain_caught_d.callback, True)

        self.network.register_callback(on_update_callback, ['updated'])

        d = defer.succeed(self.wallet.start_threads(self.network))
        d.addCallback(lambda _: blockchain_caught_d)
        return d
|
|
|
|
|
|
|
|
    def _get_cmd_runner(self):
        """Return a fresh lbryum Commands runner bound to this config/wallet/network."""
        return Commands(self.config, self.wallet, self.network)
|
2016-02-19 06:44:08 +01:00
|
|
|
|
2016-12-12 20:00:23 +01:00
|
|
|
# run commands as a defer.succeed,
|
|
|
|
# lbryum commands should be run this way , unless if the command
|
|
|
|
# only makes a lbrum server query, use _run_cmd_as_defer_to_thread()
|
2017-04-07 02:42:07 +02:00
|
|
|
def _run_cmd_as_defer_succeed(self, command_name, *args, **kwargs):
|
2016-12-19 21:34:15 +01:00
|
|
|
cmd_runner = self._get_cmd_runner()
|
2016-12-12 20:00:23 +01:00
|
|
|
cmd = known_commands[command_name]
|
2016-12-19 21:34:15 +01:00
|
|
|
func = getattr(cmd_runner, cmd.name)
|
2017-04-07 02:42:07 +02:00
|
|
|
return defer.succeed(func(*args, **kwargs))
|
2016-12-12 20:00:23 +01:00
|
|
|
|
|
|
|
# run commands as a deferToThread, lbryum commands that only make
|
|
|
|
# queries to lbryum server should be run this way
|
2017-01-06 15:04:18 +01:00
|
|
|
# TODO: keep track of running threads and cancel them on `stop`
|
2017-04-23 19:33:06 +02:00
|
|
|
# otherwise the application will hang, waiting for threads to complete
|
2017-04-07 02:42:07 +02:00
|
|
|
def _run_cmd_as_defer_to_thread(self, command_name, *args, **kwargs):
|
2016-12-19 21:34:15 +01:00
|
|
|
cmd_runner = self._get_cmd_runner()
|
2016-12-12 20:00:23 +01:00
|
|
|
cmd = known_commands[command_name]
|
2016-12-19 21:34:15 +01:00
|
|
|
func = getattr(cmd_runner, cmd.name)
|
2017-04-07 02:42:07 +02:00
|
|
|
return threads.deferToThread(func, *args, **kwargs)
|
2016-12-12 20:00:23 +01:00
|
|
|
|
2017-01-12 18:51:44 +01:00
|
|
|
def _update_balance(self):
|
2016-08-30 04:33:10 +02:00
|
|
|
accounts = None
|
|
|
|
exclude_claimtrietx = True
|
2017-04-23 19:33:06 +02:00
|
|
|
d = self._run_cmd_as_defer_succeed('getbalance', accounts, exclude_claimtrietx)
|
2017-01-25 17:44:21 +01:00
|
|
|
d.addCallback(
|
|
|
|
lambda result: Decimal(result['confirmed']) + Decimal(result.get('unconfirmed', 0.0)))
|
2016-02-19 06:44:08 +01:00
|
|
|
return d
|
|
|
|
|
2017-03-27 18:41:53 +02:00
|
|
|
# Always create and return a brand new address
|
|
|
|
@defer.inlineCallbacks
|
2016-02-19 06:44:08 +01:00
|
|
|
def get_new_address(self):
|
2017-03-27 18:41:53 +02:00
|
|
|
addr = self.wallet.create_new_address(account=None)
|
|
|
|
yield self._save_wallet()
|
|
|
|
defer.returnValue(addr)
|
|
|
|
|
2017-04-23 19:33:06 +02:00
|
|
|
# Get the balance of a given address.
|
|
|
|
|
|
|
|
def get_address_balance(self, address, include_balance=False):
|
|
|
|
c, u, x = self.wallet.get_addr_balance(address)
|
|
|
|
if include_balance is False:
|
|
|
|
return Decimal(float(c) / COIN)
|
|
|
|
else:
|
|
|
|
return Decimal((float(c) + float(u) + float(x)) / COIN)
|
|
|
|
|
|
|
|
|
2017-03-27 18:41:53 +02:00
|
|
|
# Return an address with no balance in it, if
|
|
|
|
# there is none, create a brand new address
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_unused_address(self):
|
2017-01-19 02:15:40 +01:00
|
|
|
addr = self.wallet.get_unused_address(account=None)
|
|
|
|
if addr is None:
|
|
|
|
addr = self.wallet.create_new_address()
|
2017-03-27 18:41:53 +02:00
|
|
|
yield self._save_wallet()
|
|
|
|
defer.returnValue(addr)
|
2016-02-19 06:44:08 +01:00
|
|
|
|
|
|
|
def get_block(self, blockhash):
|
2016-12-12 20:00:23 +01:00
|
|
|
return self._run_cmd_as_defer_to_thread('getblock', blockhash)
|
2016-02-19 06:44:08 +01:00
|
|
|
|
|
|
|
def get_most_recent_blocktime(self):
|
2017-02-24 01:45:07 +01:00
|
|
|
height = self.network.get_local_height()
|
|
|
|
if height < 0:
|
|
|
|
return defer.succeed(None)
|
2016-02-19 06:44:08 +01:00
|
|
|
header = self.network.get_header(self.network.get_local_height())
|
|
|
|
return defer.succeed(header['timestamp'])
|
|
|
|
|
|
|
|
def get_best_blockhash(self):
|
|
|
|
height = self.network.get_local_height()
|
2017-02-24 01:45:07 +01:00
|
|
|
if height < 0:
|
|
|
|
return defer.succeed(None)
|
2016-12-12 20:00:23 +01:00
|
|
|
header = self.network.blockchain.read_header(height)
|
|
|
|
return defer.succeed(self.network.blockchain.hash_header(header))
|
2016-02-19 06:44:08 +01:00
|
|
|
|
2016-09-09 06:48:54 +02:00
|
|
|
def _get_blockhash(self, height):
|
2016-12-12 20:00:23 +01:00
|
|
|
header = self.network.blockchain.read_header(height)
|
|
|
|
return defer.succeed(self.network.blockchain.hash_header(header))
|
2016-09-09 06:48:54 +02:00
|
|
|
|
2017-03-21 17:10:48 +01:00
|
|
|
    def _get_transaction(self, txid):
        """Fetch the transaction `txid` from the lbryum server in a thread."""
        return self._run_cmd_as_defer_to_thread("gettransaction", txid)
|
|
|
|
|
2016-02-19 06:44:08 +01:00
|
|
|
    def get_name_claims(self):
        """Return this wallet's own name claims via the 'getnameclaims' command."""
        return self._run_cmd_as_defer_succeed('getnameclaims')
|
2016-02-19 06:44:08 +01:00
|
|
|
|
2016-08-08 07:42:37 +02:00
|
|
|
    def _get_claims_for_name(self, name):
        """Query the lbryum server for all claims made for `name`."""
        return self._run_cmd_as_defer_to_thread('getclaimsforname', name)
|
|
|
|
|
2017-03-28 23:13:59 +02:00
|
|
|
    @defer.inlineCallbacks
    def _send_name_claim(self, name, value, amount,
                         certificate_id=None, claim_address=None):
        """Broadcast a claim of `name` with hex-encoded `value` and bid `amount`.

        Optionally signs with the channel certificate `certificate_id` and
        sends the claim to `claim_address`.
        """
        log.info("Send claim: %s for %s: %s ", name, amount, value)
        claim_out = yield self._run_cmd_as_defer_succeed('claim', name, value, amount,
                                                         certificate_id=certificate_id,
                                                         claim_addr=claim_address)
        defer.returnValue(claim_out)
|
2016-08-09 03:32:39 +02:00
|
|
|
|
2017-03-23 01:27:01 +01:00
|
|
|
@defer.inlineCallbacks
def _abandon_claim(self, claim_id):
    """Abandon the claim *claim_id* via the lbryum 'abandon' command.

    :return: (via deferred) the 'abandon' command output
    """
    # Use lazy %-style logging args instead of eager string formatting,
    # matching logging best practice and the rest of this module.
    log.debug("Abandon %s", claim_id)
    tx_out = yield self._run_cmd_as_defer_succeed('abandon', claim_id)
    defer.returnValue(tx_out)
|
2016-02-26 07:45:52 +01:00
|
|
|
|
2017-03-28 23:13:59 +02:00
|
|
|
@defer.inlineCallbacks
def _support_claim(self, name, claim_id, amount):
    """Create a support for claim *claim_id* on *name* worth *amount* LBC,
    then broadcast the resulting transaction.

    :return: (via deferred) the broadcast claim output dict
    """
    # Lazy logging args (defer formatting until the record is emitted)
    # instead of eager % interpolation.
    log.debug("Support %s %s %f", name, claim_id, amount)
    # broadcast=False: the 'support' command only builds the tx; it is
    # broadcast explicitly below via _broadcast_claim_transaction.
    broadcast = False
    tx = yield self._run_cmd_as_defer_succeed('support', name, claim_id, amount, broadcast)
    claim_out = yield self._broadcast_claim_transaction(tx)
    defer.returnValue(claim_out)
|
2016-08-08 08:32:56 +02:00
|
|
|
|
2017-03-28 23:13:59 +02:00
|
|
|
@defer.inlineCallbacks
def _broadcast_claim_transaction(self, claim_out):
    """Broadcast the transaction inside a claim-command output dict.

    Requires a 'success' key in *claim_out*; broadcasts claim_out['tx']
    only when 'success' is truthy, then passes *claim_out* through.

    :raises Exception: when the dict lacks a 'success' key
    """
    if 'success' not in claim_out:
        raise Exception('Unexpected claim command output: {}'.format(claim_out))
    if claim_out['success']:
        yield self._broadcast_transaction(claim_out['tx'])
    defer.returnValue(claim_out)
|
2016-12-12 20:00:23 +01:00
|
|
|
|
2017-03-28 23:13:59 +02:00
|
|
|
@defer.inlineCallbacks
def _broadcast_transaction(self, raw_tx):
    """Broadcast a raw transaction via the lbryum 'broadcast' command.

    A valid txid is 64 hex characters; anything else is treated as a
    rejection by the server.

    :return: (via deferred) the txid string
    :raises Exception: when the returned value is not 64 characters long
    """
    txid = yield self._run_cmd_as_defer_succeed('broadcast', raw_tx)
    log.info("Broadcast tx: %s", txid)
    if len(txid) != 64:
        raise Exception("Transaction rejected. Raw tx: {}".format(raw_tx))
    defer.returnValue(txid)
|
2016-02-19 06:44:08 +01:00
|
|
|
|
|
|
|
def _do_send_many(self, payments_to_send):
    """Pay several addresses in one transaction.

    :param payments_to_send: dict mapping destination -> amount, handed
        to the lbryum 'paytomany' command
    :return: deferred firing with the broadcast txid
    """
    def broadcast_send_many(paytomany_out):
        # 'paytomany' must return the raw signed transaction under 'hex'.
        if 'hex' not in paytomany_out:
            raise Exception('Unexpected paytomany output:{}'.format(paytomany_out))
        return self._broadcast_transaction(paytomany_out['hex'])

    log.debug("Doing send many. payments to send: %s", str(payments_to_send))
    d = self._run_cmd_as_defer_succeed('paytomany', payments_to_send.iteritems())
    # Pass the callback directly; the original wrapped it in a redundant
    # `lambda out: broadcast_send_many(out)`.
    d.addCallback(broadcast_send_many)
    return d
|
2016-02-19 06:44:08 +01:00
|
|
|
|
|
|
|
def _get_value_for_name(self, name):
    """Look up the current value for *name* (lbryum 'getvalueforname',
    run in a thread).

    :raises ValueError: when *name* is empty or None.  Was a bare
        Exception; ValueError is more specific and is still caught by any
        existing `except Exception` handlers.
    """
    if not name:
        raise ValueError("No name given")
    return self._run_cmd_as_defer_to_thread('getvalueforname', name)
|
|
|
|
|
|
|
|
def _get_value_for_uri(self, uri):
    """Look up the current value for *uri* (lbryum 'getvalueforuri',
    run in a thread).

    :raises ValueError: when *uri* is empty or None.  Was a bare
        Exception; ValueError is more specific and is still caught by any
        existing `except Exception` handlers.
    """
    if not uri:
        raise ValueError("No uri given")
    return self._run_cmd_as_defer_to_thread('getvalueforuri', uri)
|
|
|
|
|
|
|
|
def _claim_certificate(self, name, amount):
    """Claim a new certificate for *name* worth *amount*
    (lbryum 'claimcertificate' command).
    """
    return self._run_cmd_as_defer_succeed('claimcertificate', name, amount)
|
2017-04-07 02:42:07 +02:00
|
|
|
|
|
|
|
def _get_certificate_claims(self):
    """Return a deferred firing with this wallet's certificate claims
    (lbryum 'getcertificateclaims' command).
    """
    return self._run_cmd_as_defer_succeed('getcertificateclaims')
|
2016-02-19 06:44:08 +01:00
|
|
|
|
|
|
|
def get_claims_from_tx(self, txid):
    """Return a deferred firing with the claims contained in transaction
    *txid* (lbryum 'getclaimsfromtx' command, run in a thread).
    """
    return self._run_cmd_as_defer_to_thread('getclaimsfromtx', txid)
|
2017-04-07 02:42:07 +02:00
|
|
|
|
|
|
|
def _get_claim_by_outpoint(self, txid, nout):
    """Return a deferred firing with the claim at outpoint *txid*:*nout*
    (lbryum 'getclaimbyoutpoint' command, run in a thread).
    """
    return self._run_cmd_as_defer_to_thread('getclaimbyoutpoint', txid, nout)
|
|
|
|
|
2017-04-09 22:10:07 +02:00
|
|
|
def _get_claim_by_claimid(self, claim_id):
    """Return a deferred firing with the claim identified by *claim_id*
    (lbryum 'getclaimbyid' command, run in a thread).
    """
    return self._run_cmd_as_defer_to_thread('getclaimbyid', claim_id)
|
2016-02-19 06:44:08 +01:00
|
|
|
|
|
|
|
def _get_balance_for_address(self, address):
    """Return a deferred firing with the amount received by *address*,
    converted from base units to LBC as a Decimal.
    """
    received = Decimal(self.wallet.get_addr_received(address))
    return defer.succeed(received / COIN)
|
2016-02-19 06:44:08 +01:00
|
|
|
|
2016-02-22 20:24:49 +01:00
|
|
|
def get_nametrie(self):
    """Return a deferred firing with the claim trie
    (lbryum 'getclaimtrie' command, run in a thread).
    """
    return self._run_cmd_as_defer_to_thread('getclaimtrie')
|
2016-02-22 20:24:49 +01:00
|
|
|
|
2016-08-19 04:15:49 +02:00
|
|
|
def _get_history(self):
    """Return a deferred firing with the wallet transaction history
    (lbryum 'history' command).
    """
    return self._run_cmd_as_defer_succeed('history')
|
2016-04-18 05:23:20 +02:00
|
|
|
|
2016-09-02 07:27:30 +02:00
|
|
|
def _address_is_mine(self, address):
    """Return a deferred firing with whether *address* belongs to this
    wallet (lbryum 'ismine' command).
    """
    return self._run_cmd_as_defer_succeed('ismine', address)
|
2016-09-02 07:27:30 +02:00
|
|
|
|
2017-03-17 20:07:18 +01:00
|
|
|
def get_pub_keys(self, address):
    """Return a deferred firing with the list of public keys associated
    with *address* (could be multiple public keys if a multisig address).
    """
    return self._run_cmd_as_defer_succeed('getpubkeys', address)
|
2016-04-18 05:23:20 +02:00
|
|
|
|
2017-03-19 15:51:39 +01:00
|
|
|
def list_addresses(self):
    """Return a deferred firing with all wallet addresses
    (lbryum 'listaddresses' command).
    """
    return self._run_cmd_as_defer_succeed('listaddresses')
|
|
|
|
|
2017-03-27 18:41:53 +02:00
|
|
|
def _save_wallet(self, val=None):
    """Flush wallet storage to disk.

    Passes *val* through unchanged so this can be chained as a deferred
    callback without disturbing the value flowing down the chain.
    """
    self.wallet.storage.write()
    return defer.succeed(val)
|
|
|
|
|
2016-02-19 06:44:08 +01:00
|
|
|
|
2015-08-20 17:27:15 +02:00
|
|
|
class LBRYcrdAddressRequester(object):
    """Requests a payment address from each peer once per protocol and
    records it on the wallet.
    """
    # Fix: was `implements([IRequestCreator])`.  zope.interface's
    # `implements` takes the interface(s) as arguments, not a list — the
    # other classes in this file pass the interface bare.
    implements(IRequestCreator)

    def __init__(self, wallet):
        self.wallet = wallet
        # Protocols we have already sent an address request to.
        self._protocols = []

    # ======== IRequestCreator ======== #

    def send_next_request(self, peer, protocol):
        """Send a single 'lbrycrd_address' request per protocol.

        Returns a deferred firing with True when a request was sent,
        False when this protocol was already asked.
        """
        if protocol not in self._protocols:
            r = ClientRequest({'lbrycrd_address': True}, 'lbrycrd_address')
            d = protocol.add_request(r)
            d.addCallback(self._handle_address_response, peer, r, protocol)
            d.addErrback(self._request_failed, peer)
            self._protocols.append(protocol)
            return defer.succeed(True)
        else:
            return defer.succeed(False)

    # ======== internal calls ======== #

    def _handle_address_response(self, response_dict, peer, request, protocol):
        """Record the address a peer responded with on the wallet."""
        if request.response_identifier not in response_dict:
            raise ValueError(
                "Expected {} in response but did not get it".format(request.response_identifier))
        assert protocol in self._protocols, "Responding protocol is not in our list of protocols"
        address = response_dict[request.response_identifier]
        self.wallet.update_peer_address(peer, address)

    def _request_failed(self, err, peer):
        """Log and propagate failures other than deliberate cancellations."""
        if not err.check(RequestCanceledError):
            # Message fixed: this requester asks for a payment address,
            # not a public key.
            log.warning("A peer failed to send a valid address response. Error: %s, peer: %s",
                        err.getErrorMessage(), str(peer))
            return err
|
2015-08-20 17:27:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
class LBRYcrdAddressQueryHandlerFactory(object):
    """Builds LBRYcrdAddressQueryHandler instances bound to a wallet."""
    implements(IQueryHandlerFactory)

    def __init__(self, wallet):
        self.wallet = wallet

    # ======== IQueryHandlerFactory ======== #

    def build_query_handler(self):
        """Create a new query handler sharing this factory's wallet."""
        return LBRYcrdAddressQueryHandler(self.wallet)

    def get_primary_query_identifier(self):
        return 'lbrycrd_address'

    def get_description(self):
        return "LBRYcrd Address - an address for receiving payments via LBRYcrd"
|
|
|
|
|
|
|
|
|
|
|
|
class LBRYcrdAddressQueryHandler(object):
    """Answers a peer's 'lbrycrd_address' query with a fresh wallet
    address for that peer.
    """
    implements(IQueryHandler)

    def __init__(self, wallet):
        self.wallet = wallet
        self.query_identifiers = ['lbrycrd_address']
        # Address handed out for this peer, once a query has been served.
        self.address = None
        self.peer = None

    # ======== IQueryHandler ======== #

    def register_with_request_handler(self, request_handler, peer):
        self.peer = peer
        request_handler.register_query_handler(self, self.query_identifiers)

    def handle_queries(self, queries):
        """Serve the address query; fail if no query arrived before an
        address was ever handed out.
        """
        if self.query_identifiers[0] in queries:
            def _respond(address):
                # Remember what we handed out, then answer the query.
                self.address = address
                return {'lbrycrd_address': address}

            d = self.wallet.get_unused_address_for_peer(self.peer)
            d.addCallback(_respond)
            return d
        if self.address is None:
            log.warning("Expected a request for an address, but did not receive one")
            return defer.fail(
                Failure(ValueError("Expected but did not receive an address request")))
        else:
            return defer.succeed({})
|
2016-10-04 20:58:44 +02:00
|
|
|
|
|
|
|
|
|
|
|
def make_config(config=None):
    """Coerce *config* into a lbryum config object.

    A dict (or None, treated as an empty dict) is wrapped in SimpleConfig;
    any other value is assumed to already be a config and returned as-is.
    """
    if config is None:
        config = {}
    if isinstance(config, dict):
        return SimpleConfig(config)
    return config
|