2018-05-25 08:03:25 +02:00
|
|
|
import os
|
2018-06-26 23:22:05 +02:00
|
|
|
import logging
|
2018-05-25 08:03:25 +02:00
|
|
|
from binascii import hexlify, unhexlify
|
2018-07-17 06:09:02 +02:00
|
|
|
from typing import Dict, Type, Iterable
|
2018-05-25 08:03:25 +02:00
|
|
|
from operator import itemgetter
|
2018-06-26 23:22:05 +02:00
|
|
|
from collections import namedtuple
|
2018-05-25 08:03:25 +02:00
|
|
|
|
2018-07-17 06:09:02 +02:00
|
|
|
from twisted.internet import defer
|
2018-05-25 08:03:25 +02:00
|
|
|
|
2018-06-11 15:33:32 +02:00
|
|
|
from torba import baseaccount
|
|
|
|
from torba import basenetwork
|
|
|
|
from torba import basetransaction
|
2018-08-16 06:43:38 +02:00
|
|
|
from torba.basedatabase import BaseDatabase
|
2018-08-16 06:56:46 +02:00
|
|
|
from torba.baseheader import BaseHeaders
|
2018-07-29 02:52:54 +02:00
|
|
|
from torba.coinselection import CoinSelector
|
|
|
|
from torba.constants import COIN, NULL_HASH32
|
|
|
|
from torba.stream import StreamController
|
|
|
|
from torba.hash import hash160, double_sha256, sha256, Base58
|
2018-05-25 08:03:25 +02:00
|
|
|
|
2018-06-26 23:22:05 +02:00
|
|
|
# Module-level logger for ledger synchronization events.
log = logging.getLogger(__name__)


# Alias meaning "a concrete BaseLedger subclass", used by the registry below.
LedgerType = Type['BaseLedger']
|
|
|
|
|
2018-05-25 08:03:25 +02:00
|
|
|
|
2018-06-11 15:33:32 +02:00
|
|
|
class LedgerRegistry(type):
    """Metaclass that registers every concrete ``BaseLedger`` subclass.

    Each subclass is keyed by ``cls.get_id()`` so that the class can later
    be recovered with :meth:`get_ledger_class`.
    """

    # Registry of ledger id -> ledger class, shared by all ledgers.
    ledgers: Dict[str, Type['BaseLedger']] = {}

    def __new__(mcs, name, bases, attrs):
        cls = super().__new__(mcs, name, bases, attrs)
        # The abstract root (a class literally named 'BaseLedger' declared
        # with no bases) is not registered; every subclass is.
        if not (name == 'BaseLedger' and not bases):
            ledger_id = cls.get_id()
            if ledger_id in mcs.ledgers:
                # Raise a real exception instead of using `assert`, which is
                # stripped when running under `python -O`.
                raise KeyError('Ledger with id "{}" already registered.'.format(ledger_id))
            mcs.ledgers[ledger_id] = cls
        return cls

    @classmethod
    def get_ledger_class(mcs, ledger_id: str) -> Type['BaseLedger']:
        """Return the ledger class registered under ``ledger_id``.

        Raises:
            KeyError: if no ledger with that id has been registered.
        """
        return mcs.ledgers[ledger_id]
|
2018-05-25 08:03:25 +02:00
|
|
|
|
|
|
|
|
2018-10-03 13:08:02 +02:00
|
|
|
class TransactionEvent(namedtuple('TransactionEvent', ('address', 'tx'))):
    """Event pairing an address with a transaction that was synced for it."""
|
|
|
|
|
|
|
|
|
2018-08-17 03:41:22 +02:00
|
|
|
class BlockHeightEvent(namedtuple('BlockHeightEvent', ('height', 'change'))):
    """Event carrying the new chain height and how many headers were added."""
|
|
|
|
|
|
|
|
|
2018-07-29 02:52:54 +02:00
|
|
|
class BaseLedger(metaclass=LedgerRegistry):
    """Ties together the database, headers, network client and accounts for
    a single blockchain/network combination (e.g. one coin on mainnet)."""

    # Identity; concrete subclasses must set all three. ``symbol`` and
    # ``network_name`` combine into the registry id (see get_id()).
    name: str
    symbol: str
    network_name: str

    # Pluggable component classes; subclasses may override any of them.
    database_class = BaseDatabase
    account_class = baseaccount.BaseAccount
    network_class = basenetwork.BaseNetwork
    transaction_class = basetransaction.BaseTransaction

    # Concrete subclasses must supply their header container implementation.
    headers_class: Type[BaseHeaders]

    # Serialization prefixes for addresses and extended keys; values are
    # chain-specific and must be provided by concrete subclasses.
    pubkey_address_prefix: bytes
    script_address_prefix: bytes
    extended_public_key_prefix: bytes
    extended_private_key_prefix: bytes

    # Fallback fee rate used when config does not supply 'fee_per_byte'.
    default_fee_per_byte = 10
|
2018-05-25 08:03:25 +02:00
|
|
|
|
2018-07-12 04:37:15 +02:00
|
|
|
    def __init__(self, config=None):
        """Build the ledger from ``config``, constructing any component the
        config does not supply pre-built.

        Recognized config keys: 'db', 'headers', 'network', 'data_path',
        'fee_per_byte'.
        """
        self.config = config or {}
        # Database: injected via config or created under self.path.
        self.db: BaseDatabase = self.config.get('db') or self.database_class(
            os.path.join(self.path, "blockchain.db")
        )
        self.db.ledger = self
        # Header storage, same injection pattern as the database.
        self.headers: BaseHeaders = self.config.get('headers') or self.headers_class(
            os.path.join(self.path, "headers")
        )
        # Network client; subscribe to header and address-status pushes.
        self.network = self.config.get('network') or self.network_class(self)
        self.network.on_header.listen(self.receive_header)
        self.network.on_status.listen(self.receive_status)
        self.accounts = []
        self.fee_per_byte: int = self.config.get('fee_per_byte', self.default_fee_per_byte)

        # Stream of TransactionEvent, fired from update_history() whenever a
        # transaction is synced; logged at info level for observability.
        self._on_transaction_controller = StreamController()
        self.on_transaction = self._on_transaction_controller.stream
        self.on_transaction.listen(
            lambda e: log.info(
                '(%s) on_transaction: address=%s, height=%s, is_verified=%s, tx.id=%s',
                self.get_id(), e.address, e.tx.height, e.tx.is_verified, e.tx.id
            )
        )

        # Stream of BlockHeightEvent, fired from update_headers() as new
        # headers get connected to the local chain.
        self._on_header_controller = StreamController()
        self.on_header = self._on_header_controller.stream
        self.on_header.listen(
            lambda change: log.info(
                '%s: added %s header blocks, final height %s',
                self.get_id(), change, self.headers.height
            )
        )

        # Per-txid locks (see update_history) plus global locks serializing
        # UTXO reservation and header processing.
        self._transaction_processing_locks = {}
        self._utxo_reservation_lock = defer.DeferredLock()
        self._header_processing_lock = defer.DeferredLock()
|
2018-06-25 15:54:35 +02:00
|
|
|
|
2018-06-11 15:33:32 +02:00
|
|
|
@classmethod
|
|
|
|
def get_id(cls):
|
|
|
|
return '{}_{}'.format(cls.symbol.lower(), cls.network_name.lower())
|
|
|
|
|
2018-07-15 22:04:11 +02:00
|
|
|
@classmethod
|
|
|
|
def hash160_to_address(cls, h160):
|
|
|
|
raw_address = cls.pubkey_address_prefix + h160
|
2018-06-11 15:33:32 +02:00
|
|
|
return Base58.encode(bytearray(raw_address + double_sha256(raw_address)[0:4]))
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def address_to_hash160(address):
|
2018-07-29 02:52:54 +02:00
|
|
|
return Base58.decode(address)[1:21]
|
2018-06-11 15:33:32 +02:00
|
|
|
|
2018-07-15 22:04:11 +02:00
|
|
|
@classmethod
|
|
|
|
def public_key_to_address(cls, public_key):
|
|
|
|
return cls.hash160_to_address(hash160(public_key))
|
2018-06-11 15:33:32 +02:00
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def private_key_to_wif(private_key):
|
|
|
|
return b'\x1c' + private_key + b'\x01'
|
|
|
|
|
2018-06-08 05:47:46 +02:00
|
|
|
@property
|
|
|
|
def path(self):
|
2018-07-05 02:43:25 +02:00
|
|
|
return os.path.join(self.config['data_path'], self.get_id())
|
2018-06-08 05:47:46 +02:00
|
|
|
|
2018-08-30 17:50:11 +02:00
|
|
|
def add_account(self, account: baseaccount.BaseAccount):
|
2018-07-17 05:58:29 +02:00
|
|
|
self.accounts.append(account)
|
2018-06-14 02:57:57 +02:00
|
|
|
|
2018-06-11 15:33:32 +02:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_private_key_for_address(self, address):
|
2018-10-09 04:49:50 +02:00
|
|
|
match = yield self.db.get_address(address=address)
|
2018-06-11 15:33:32 +02:00
|
|
|
if match:
|
|
|
|
for account in self.accounts:
|
2018-07-15 03:34:07 +02:00
|
|
|
if match['account'] == account.public_key.address:
|
2018-09-20 19:00:38 +02:00
|
|
|
return account.get_private_key(match['chain'], match['position'])
|
2018-06-11 15:33:32 +02:00
|
|
|
|
2018-06-18 05:22:15 +02:00
|
|
|
@defer.inlineCallbacks
|
2018-07-29 02:52:54 +02:00
|
|
|
def get_effective_amount_estimators(self, funding_accounts: Iterable[baseaccount.BaseAccount]):
|
2018-06-18 05:22:15 +02:00
|
|
|
estimators = []
|
|
|
|
for account in funding_accounts:
|
2018-10-07 20:53:44 +02:00
|
|
|
utxos = yield account.get_utxos()
|
2018-06-18 05:22:15 +02:00
|
|
|
for utxo in utxos:
|
|
|
|
estimators.append(utxo.get_estimator(self))
|
2018-09-20 19:00:38 +02:00
|
|
|
return estimators
|
2018-06-18 05:22:15 +02:00
|
|
|
|
2018-07-29 02:52:54 +02:00
|
|
|
    @defer.inlineCallbacks
    def get_spendable_utxos(self, amount: int, funding_accounts):
        """Select and reserve UTXOs from ``funding_accounts`` to cover
        ``amount``; the selection is reserved in the database before the
        lock is released so concurrent spends cannot pick the same coins."""
        # Serialize selection + reservation across concurrent callers.
        yield self._utxo_reservation_lock.acquire()
        try:
            txos = yield self.get_effective_amount_estimators(funding_accounts)
            selector = CoinSelector(
                txos, amount,
                # Fee of a throwaway pay-to-pubkey-hash output, passed to the
                # selector (presumably the marginal cost of adding an output;
                # confirm against CoinSelector's contract).
                self.transaction_class.output_class.pay_pubkey_hash(COIN, NULL_HASH32).get_fee(self)
            )
            spendables = selector.select()
            if spendables:
                yield self.reserve_outputs(s.txo for s in spendables)
        except Exception:
            log.exception('Failed to get spendable utxos:')
            raise
        finally:
            self._utxo_reservation_lock.release()
        return spendables
|
2018-07-29 02:52:54 +02:00
|
|
|
|
|
|
|
    def reserve_outputs(self, txos):
        # Delegate marking ``txos`` as reserved to the database.
        return self.db.reserve_outputs(txos)

    def release_outputs(self, txos):
        # Delegate undoing a previous reservation to the database.
        return self.db.release_outputs(txos)
|
|
|
|
|
2018-06-12 16:02:04 +02:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_local_status(self, address):
|
2018-10-09 04:44:30 +02:00
|
|
|
address_details = yield self.db.get_address(address=address)
|
2018-06-26 23:22:05 +02:00
|
|
|
history = address_details['history'] or ''
|
2018-07-29 02:52:54 +02:00
|
|
|
h = sha256(history.encode())
|
2018-09-20 19:00:38 +02:00
|
|
|
return hexlify(h)
|
2018-06-12 16:02:04 +02:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_local_history(self, address):
|
2018-10-09 04:44:30 +02:00
|
|
|
address_details = yield self.db.get_address(address=address)
|
2018-06-26 23:22:05 +02:00
|
|
|
history = address_details['history'] or ''
|
|
|
|
parts = history.split(':')[:-1]
|
2018-09-20 19:00:38 +02:00
|
|
|
return list(zip(parts[0::2], map(int, parts[1::2])))
|
2018-06-12 16:02:04 +02:00
|
|
|
|
2018-06-25 15:54:35 +02:00
|
|
|
@staticmethod
|
|
|
|
def get_root_of_merkle_tree(branches, branch_positions, working_branch):
|
|
|
|
for i, branch in enumerate(branches):
|
|
|
|
other_branch = unhexlify(branch)[::-1]
|
|
|
|
other_branch_on_left = bool((branch_positions >> i) & 1)
|
|
|
|
if other_branch_on_left:
|
|
|
|
combined = other_branch + working_branch
|
|
|
|
else:
|
|
|
|
combined = working_branch + other_branch
|
|
|
|
working_branch = double_sha256(combined)
|
|
|
|
return hexlify(working_branch[::-1])
|
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def validate_transaction_and_set_position(self, tx, height):
        """Verify ``tx`` against the merkle root of the header at ``height``
        and record its in-block position.

        Side effects: sets ``tx.position`` and ``tx.is_verified``.
        """
        # Can't validate against a header we haven't downloaded yet.
        if not height <= len(self.headers):
            return False
        merkle = yield self.network.get_merkle(tx.id, height)
        merkle_root = self.get_root_of_merkle_tree(merkle['merkle'], merkle['pos'], tx.hash)
        header = self.headers[height]
        tx.position = merkle['pos']
        tx.is_verified = merkle_root == header['merkle_root']
        # NOTE(review): this path implicitly returns None while the early
        # exit above returns False; callers appear to ignore the result.
|
2018-06-25 15:54:35 +02:00
|
|
|
|
2018-05-25 08:03:25 +02:00
|
|
|
    @defer.inlineCallbacks
    def start(self):
        """Open local storage, connect to the network and begin syncing."""
        # Create the ledger directory on first run.
        if not os.path.exists(self.path):
            os.mkdir(self.path)
        # Open the database and header storage concurrently.
        yield defer.gatherResults([
            self.db.open(),
            self.headers.open()
        ])
        # Wait for the first server connection, sync once, then re-join on
        # every subsequent (re)connection.
        first_connection = self.network.on_connected.first
        self.network.start()
        yield first_connection
        yield self.join_network()
        self.network.on_connected.listen(self.join_network)
|
2018-10-08 23:04:56 +02:00
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def join_network(self, *args):
        """Bring local state up to date with the connected server.

        ``*args`` absorbs the event payload when invoked as an
        ``on_connected`` listener.
        """
        log.info("Subscribing and updating accounts.")
        yield self.update_headers()
        yield self.network.subscribe_headers()
        yield self.update_accounts()
|
|
|
|
|
2018-06-27 02:41:03 +02:00
|
|
|
    @defer.inlineCallbacks
    def stop(self):
        """Shut down the ledger: stop networking first so no new updates
        arrive while local storage is being closed."""
        yield self.network.stop()
        yield self.db.close()
        yield self.headers.close()
|
2018-05-25 08:03:25 +02:00
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def update_headers(self, height=None, headers=None, subscription_update=False):
        """Download and connect block headers until the local chain is
        caught up.

        Args:
            height: height at which ``headers`` should connect; ``None``
                means sync from the current local tip.
            headers: optional pre-fetched hex header data (from a
                subscription push); fetched from the network when absent.
            subscription_update: True when called with a single pushed
                header, enabling an early-return shortcut.

        Handles reorganizations by rewinding one header at a time (up to
        100) until newly downloaded headers connect again.

        Raises:
            IndexError: on a negative connect() result, a rewind past
                genesis, or more than 100 rewound headers.
        """
        rewound = 0
        while True:

            if height is None or height > len(self.headers):
                # sometimes header subscription updates are for a header in the future
                # which can't be connected, so we do a normal header sync instead
                height = len(self.headers)
                headers = None
                subscription_update = False

            if not headers:
                header_response = yield self.network.get_headers(height, 2001)
                headers = header_response['hex']

            if not headers:
                # Nothing to do, network thinks we're already at the latest height.
                return

            added = yield self.headers.connect(height, unhexlify(headers))
            if added > 0:
                height += added
                self._on_header_controller.add(
                    BlockHeightEvent(self.headers.height, added))

                if rewound > 0:
                    # we started rewinding blocks and apparently found
                    # a new chain
                    rewound = 0
                    yield self.db.rewind_blockchain(height)

                if subscription_update:
                    # subscription updates are for latest header already
                    # so we don't need to check if there are newer / more
                    # on another loop of update_headers(), just return instead
                    return

            elif added == 0:
                # we had headers to connect but none got connected, probably a reorganization
                height -= 1
                rewound += 1
                log.warning(
                    "Blockchain Reorganization: attempting rewind to height %s from starting height %s",
                    height, height+rewound
                )

            else:
                raise IndexError("headers.connect() returned negative number ({})".format(added))

            if height < 0:
                raise IndexError(
                    "Blockchain reorganization rewound all the way back to genesis hash. "
                    "Something is very wrong. Maybe you are on the wrong blockchain?"
                )

            if rewound >= 100:
                raise IndexError(
                    "Blockchain reorganization dropped {} headers. This is highly unusual. "
                    "Will not continue to attempt reorganizing. Please, delete the ledger "
                    "synchronization directory inside your wallet directory (folder: '{}') and "
                    "restart the program to synchronize from scratch."
                    .format(rewound, self.get_id())
                )

            headers = None  # ready to download some more headers

            # if we made it this far and this was a subscription_update
            # it means something went wrong and now we're doing a more
            # robust sync, turn off subscription update shortcut
            subscription_update = False
|
2018-05-25 08:03:25 +02:00
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def receive_header(self, response):
        """Handle a header pushed by the server's header subscription."""
        # Serialize header processing: only one update_headers() at a time.
        yield self._header_processing_lock.acquire()
        try:
            header = response[0]
            yield self.update_headers(
                height=header['height'], headers=header['hex'], subscription_update=True
            )
        finally:
            self._header_processing_lock.release()
|
2018-05-25 08:03:25 +02:00
|
|
|
|
|
|
|
def update_accounts(self):
|
|
|
|
return defer.DeferredList([
|
|
|
|
self.update_account(a) for a in self.accounts
|
|
|
|
])
|
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def update_account(self, account):  # type: (baseaccount.BaseAccount) -> defer.Deferred
        """Restore history for all of an account's addresses, growing the
        address gap as needed, then subscribe them for live updates."""
        # Before subscribing, download history for any addresses that don't have any,
        # this avoids situation where we're getting status updates to addresses we know
        # need to update anyways. Continue to get history and create more addresses until
        # all missing addresses are created and history for them is fully restored.
        yield account.ensure_address_gap()
        addresses = yield account.get_addresses(used_times=0)
        while addresses:
            yield defer.DeferredList([
                self.update_history(a) for a in addresses
            ])
            addresses = yield account.ensure_address_gap()

        # By this point all of the addresses should be restored and we
        # can now subscribe all of them to receive updates.
        all_addresses = yield account.get_addresses()
        yield defer.DeferredList(
            list(map(self.subscribe_history, all_addresses))
        )
|
2018-06-08 05:47:46 +02:00
|
|
|
|
2018-05-25 08:03:25 +02:00
|
|
|
    @defer.inlineCallbacks
    def update_history(self, address):
        """Reconcile the local transaction history for ``address`` with the
        server's, fetching and validating any transactions we are missing
        and persisting the merged history string."""
        remote_history = yield self.network.get_history(address)
        local_history = yield self.get_local_history(address)

        synced_history = []
        for i, (hex_id, remote_height) in enumerate(map(itemgetter('tx_hash', 'height'), remote_history)):

            synced_history.append((hex_id, remote_height))

            # Already have this transaction at the same height; skip it.
            if i < len(local_history) and local_history[i] == (hex_id, remote_height):
                continue

            # One lock per txid so the same transaction isn't processed
            # concurrently for different addresses.
            lock = self._transaction_processing_locks.setdefault(hex_id, defer.DeferredLock())

            yield lock.acquire()

            try:

                # see if we have a local copy of transaction, otherwise fetch it from server
                tx = yield self.db.get_transaction(txid=hex_id)
                save_tx = None
                if tx is None:
                    _raw = yield self.network.get_transaction(hex_id)
                    tx = self.transaction_class(unhexlify(_raw))
                    save_tx = 'insert'

                tx.height = remote_height

                # (Re)verify against the block's merkle root once confirmed
                # (height > 0) and not already verified/positioned.
                if remote_height > 0 and (not tx.is_verified or tx.position == -1):
                    yield self.validate_transaction_and_set_position(tx, remote_height)
                    if save_tx is None:
                        save_tx = 'update'

                # Persist tx i/o plus the history synced so far, encoded as
                # repeated "txid:height:" segments.
                yield self.db.save_transaction_io(
                    save_tx, tx, address, self.address_to_hash160(address),
                    ''.join('{}:{}:'.format(tx_id, tx_height) for tx_id, tx_height in synced_history)
                )

                log.debug(
                    "%s: sync'ed tx %s for address: %s, height: %s, verified: %s",
                    self.get_id(), hex_id, address, tx.height, tx.is_verified
                )

                self._on_transaction_controller.add(TransactionEvent(address, tx))

            except Exception:
                log.exception('Failed to synchronize transaction:')
                raise

            finally:
                lock.release()
                # Drop the per-txid lock once nobody is waiting on it.
                if not lock.locked and hex_id in self._transaction_processing_locks:
                    del self._transaction_processing_locks[hex_id]
|
2018-05-25 08:03:25 +02:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def subscribe_history(self, address):
|
2018-06-08 05:47:46 +02:00
|
|
|
remote_status = yield self.network.subscribe_address(address)
|
2018-06-12 16:02:04 +02:00
|
|
|
local_status = yield self.get_local_status(address)
|
2018-06-08 05:47:46 +02:00
|
|
|
if local_status != remote_status:
|
2018-06-12 16:02:04 +02:00
|
|
|
yield self.update_history(address)
|
2018-05-25 08:03:25 +02:00
|
|
|
|
2018-06-08 05:47:46 +02:00
|
|
|
@defer.inlineCallbacks
|
2018-08-16 06:43:38 +02:00
|
|
|
def receive_status(self, response):
|
2018-06-08 05:47:46 +02:00
|
|
|
address, remote_status = response
|
2018-06-12 16:02:04 +02:00
|
|
|
local_status = yield self.get_local_status(address)
|
2018-06-08 05:47:46 +02:00
|
|
|
if local_status != remote_status:
|
2018-06-12 16:02:04 +02:00
|
|
|
yield self.update_history(address)
|
2018-05-25 08:03:25 +02:00
|
|
|
|
|
|
|
def broadcast(self, tx):
|
2018-07-15 03:34:07 +02:00
|
|
|
return self.network.broadcast(hexlify(tx.raw).decode())
|