# Copyright (c) 2016-2017, Neil Booth
#
# All rights reserved.
#
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.

"""Class for handling asynchronous connections to a blockchain
daemon."""

import asyncio
import itertools
import json
import time
from calendar import timegm
from struct import pack
from time import strptime

import aiohttp

from torba.server.util import hex_to_bytes, class_logger, \
    unpack_le_uint16_from, pack_varint
from torba.server.hash import hex_str_to_hash, hash_to_hex_str
from torba.server.tx import DeserializerDecred
from torba.rpc import JSONRPC

class DaemonError(Exception):
    """Raised when the daemon returns an error in its results."""


class WarmingUpError(Exception):
    """Internal - when the daemon is warming up."""


class WorkQueueFullError(Exception):
    """Internal - when the daemon's work queue is full."""

class Daemon:
    """Handles connections to a daemon at the given URL."""

    WARMING_UP = -28
    id_counter = itertools.count()

    def __init__(self, coin, url, max_workqueue=10, init_retry=0.25,
                 max_retry=4.0):
        self.coin = coin
        self.logger = class_logger(__name__, self.__class__.__name__)
        self.set_url(url)
        # Limit concurrent RPC calls to this number.
        # See DEFAULT_HTTP_WORKQUEUE in bitcoind, which is typically 16
        self.workqueue_semaphore = asyncio.Semaphore(value=max_workqueue)
        self.init_retry = init_retry
        self.max_retry = max_retry
        self._height = None
        self.available_rpcs = {}

    def set_url(self, url):
        """Set the URLs to the given comma-separated list, and switch to
        the first one."""
        urls = url.split(',')
        urls = [self.coin.sanitize_url(url) for url in urls]
        for n, url in enumerate(urls):
            status = '' if n else ' (current)'
            logged_url = self.logged_url(url)
            self.logger.info(f'daemon #{n + 1} at {logged_url}{status}')
        self.url_index = 0
        self.urls = urls
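
    # Example (hypothetical hosts and credentials): a comma-separated pair
    # of URLs registers two daemons, and failover() below rotates to the
    # second if the first stops responding.
    #
    #   daemon = Daemon(coin, 'http://user:pass@host1:8332,'
    #                         'http://user:pass@host2:8332')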

    def current_url(self):
        """Returns the current daemon URL."""
        return self.urls[self.url_index]

    def logged_url(self, url=None):
        """The host and port part, for logging."""
        url = url or self.current_url()
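        # e.g. 'http://user:pass@127.0.0.1:8332/' is logged as
        # '127.0.0.1:8332/', keeping credentials out of the log
        # (illustrative URL).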
        return url[url.rindex('@') + 1:]

    def failover(self):
        """Call to fail-over to the next daemon URL.

        Returns False if there is only one, otherwise True.
        """
        if len(self.urls) > 1:
            self.url_index = (self.url_index + 1) % len(self.urls)
            self.logger.info(f'failing over to {self.logged_url()}')
            return True
        return False

    def client_session(self):
        """An aiohttp client session."""
        return aiohttp.ClientSession()

    async def _send_data(self, data):
        async with self.workqueue_semaphore:
            async with self.client_session() as session:
                async with session.post(self.current_url(), data=data) as resp:
                    kind = resp.headers.get('Content-Type', None)
                    if kind == 'application/json':
                        return await resp.json()
                    # bitcoind's HTTP protocol "handling" is a bad joke
                    text = await resp.text()
                    if 'Work queue depth exceeded' in text:
                        raise WorkQueueFullError
                    text = text.strip() or resp.reason
                    self.logger.error(text)
                    raise DaemonError(text)

    async def _send(self, payload, processor):
        """Send a payload to be converted to JSON.

        Handles temporary connection issues.  Daemon response errors
        are raised through DaemonError.
        """
        def log_error(error):
            nonlocal last_error_log, retry
            now = time.time()
            if now - last_error_log > 60:
                last_error_log = now
                self.logger.error(f'{error} Retrying occasionally...')
            if retry == self.max_retry and self.failover():
                retry = 0

        on_good_message = None
        last_error_log = 0
        data = json.dumps(payload)
        retry = self.init_retry
        while True:
            try:
                result = await self._send_data(data)
                result = processor(result)
                if on_good_message:
                    self.logger.info(on_good_message)
                return result
            except asyncio.TimeoutError:
                log_error('timeout error.')
            except aiohttp.ServerDisconnectedError:
                log_error('disconnected.')
                on_good_message = 'connection restored'
            except aiohttp.ClientConnectionError:
                log_error('connection problem - is your daemon running?')
                on_good_message = 'connection restored'
            except aiohttp.ClientError as e:
                log_error(f'daemon error: {e}')
                on_good_message = 'running normally'
            except WarmingUpError:
                log_error('starting up, checking blocks.')
                on_good_message = 'running normally'
            except WorkQueueFullError:
                log_error('work queue full.')
                on_good_message = 'running normally'

            await asyncio.sleep(retry)
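            # Double the delay up to max_retry; with the defaults this backs
            # off 0.25, 0.5, 1.0, 2.0, 4.0, 4.0, ... seconds.  After a
            # fail-over, log_error() zeroes retry so the new daemon is tried
            # immediately, and the max() clamp restarts the ladder at
            # init_retry.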
            retry = max(min(self.max_retry, retry * 2), self.init_retry)

    async def _send_single(self, method, params=None):
        """Send a single request to the daemon."""
        def processor(result):
            err = result['error']
            if not err:
                return result['result']
            if err.get('code') == self.WARMING_UP:
                raise WarmingUpError
            raise DaemonError(err)
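
        # The daemon speaks JSON RPC over HTTP; a request looks like
        # {"method": "getblockcount", "id": 0} and a healthy reply like
        # {"result": 571847, "error": null, "id": 0} (illustrative values).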
        payload = {'method': method, 'id': next(self.id_counter)}
        if params:
            payload['params'] = params
        return await self._send(payload, processor)

    async def _send_vector(self, method, params_iterable, replace_errs=False):
        """Send several requests of the same method.

        The result will be an array of the same length as params_iterable.
        If replace_errs is true, any item with an error is returned as None,
        otherwise an exception is raised."""
        def processor(result):
            errs = [item['error'] for item in result if item['error']]
            if any(err.get('code') == self.WARMING_UP for err in errs):
                raise WarmingUpError
            if not errs or replace_errs:
                return [item['result'] for item in result]
            raise DaemonError(errs)
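
        # A vector send is a JSON RPC batch: an array of request objects
        # answered by an array of responses, e.g. (illustrative):
        #   [{"method": "getblockhash", "params": [0], "id": 5},
        #    {"method": "getblockhash", "params": [1], "id": 6}]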
        payload = [{'method': method, 'params': p, 'id': next(self.id_counter)}
                   for p in params_iterable]
        if payload:
            return await self._send(payload, processor)
        return []

    async def _is_rpc_available(self, method):
        """Return whether given RPC method is available in the daemon.

        Results are cached and the daemon will generally not be queried
        with the same method more than once."""
        available = self.available_rpcs.get(method)
        if available is None:
            available = True
            try:
                await self._send_single(method)
            except DaemonError as e:
                err = e.args[0]
                error_code = err.get("code")
                available = error_code != JSONRPC.METHOD_NOT_FOUND
            self.available_rpcs[method] = available
        return available

    async def block_hex_hashes(self, first, count):
        """Return the hex hashes of count blocks starting at height first."""
        params_iterable = ((h, ) for h in range(first, first + count))
        return await self._send_vector('getblockhash', params_iterable)

    async def deserialised_block(self, hex_hash):
        """Return the deserialised block with the given hex hash."""
        return await self._send_single('getblock', (hex_hash, True))

    async def raw_blocks(self, hex_hashes):
        """Return the raw binary blocks with the given hex hashes."""
        params_iterable = ((h, False) for h in hex_hashes)
        blocks = await self._send_vector('getblock', params_iterable)
        # Convert hex strings to bytes
        return [hex_to_bytes(block) for block in blocks]

    async def mempool_hashes(self):
        """Return the hex hashes of the txs in the daemon's mempool."""
        return await self._send_single('getrawmempool')

    async def estimatefee(self, block_count):
        """Return the fee estimate for the block count.  Units are whole
        currency units per KB, e.g. 0.00000995, or -1 if no estimate
        is available.
        """
        args = (block_count, )
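        # Prefer the newer RPC when available: 'estimatesmartfee' returns an
        # object such as {"feerate": 0.00001, "blocks": 2}, whereas the
        # legacy 'estimatefee' returns the bare rate (or -1) directly.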
        if await self._is_rpc_available('estimatesmartfee'):
            estimate = await self._send_single('estimatesmartfee', args)
            return estimate.get('feerate', -1)
        return await self._send_single('estimatefee', args)

    async def getnetworkinfo(self):
        """Return the result of the 'getnetworkinfo' RPC call."""
        return await self._send_single('getnetworkinfo')

    async def relayfee(self):
        """The minimum fee a low-priority tx must pay in order to be accepted
        to the daemon's memory pool."""
        network_info = await self.getnetworkinfo()
        return network_info['relayfee']

    async def getrawtransaction(self, hex_hash, verbose=False):
        """Return the serialized raw transaction with the given hash."""
        # Cast to int because some coin daemons are old and require it
        return await self._send_single('getrawtransaction',
                                       (hex_hash, int(verbose)))

    async def getrawtransactions(self, hex_hashes, replace_errs=True):
        """Return the serialized raw transactions with the given hashes.

        Replaces errors with None by default."""
        params_iterable = ((hex_hash, 0) for hex_hash in hex_hashes)
        txs = await self._send_vector('getrawtransaction', params_iterable,
                                      replace_errs=replace_errs)
        # Convert hex strings to bytes
        return [hex_to_bytes(tx) if tx else None for tx in txs]

    async def broadcast_transaction(self, raw_tx):
        """Broadcast a transaction to the network."""
        return await self._send_single('sendrawtransaction', (raw_tx, ))

    async def height(self):
        """Query the daemon for its current height."""
        self._height = await self._send_single('getblockcount')
        return self._height

    def cached_height(self):
        """Return the cached daemon height.

        If the daemon has not been queried yet this returns None."""
        return self._height
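
# A minimal usage sketch (hypothetical URL and credentials), run from
# within a coroutine:
#
#   daemon = Daemon(coin, 'http://user:pass@localhost:8332')
#   height = await daemon.height()
#   hashes = await daemon.block_hex_hashes(height - 9, 10)
#   raw = await daemon.raw_blocks(hashes)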

class DashDaemon(Daemon):

    async def masternode_broadcast(self, params):
        """Broadcast a masternode announcement to the network."""
        return await self._send_single('masternodebroadcast', params)

    async def masternode_list(self, params):
        """Return the masternode status."""
        return await self._send_single('masternodelist', params)

class FakeEstimateFeeDaemon(Daemon):
    """Daemon that simulates estimatefee and relayfee RPC calls.  Coins
    that want to use this daemon must define ESTIMATE_FEE and RELAY_FEE."""

    async def estimatefee(self, block_count):
        """Return the fee estimate for the given parameters."""
        return self.coin.ESTIMATE_FEE

    async def relayfee(self):
        """The minimum fee a low-priority tx must pay in order to be accepted
        to the daemon's memory pool."""
        return self.coin.RELAY_FEE

class LegacyRPCDaemon(Daemon):
    """Handles connections to a daemon at the given URL.

    This class is useful for daemons that don't have the new 'getblock'
    RPC call that returns the block in hex; the workaround is to manually
    recreate the block bytes.  The recreated block bytes may not be exactly
    those in the underlying blockchain, but they are good enough for our
    indexing purposes."""

    async def raw_blocks(self, hex_hashes):
        """Return the raw binary blocks with the given hex hashes."""
        params_iterable = ((h, ) for h in hex_hashes)
        block_info = await self._send_vector('getblock', params_iterable)

        blocks = []
        for i in block_info:
            raw_block = await self.make_raw_block(i)
            blocks.append(raw_block)

        return blocks

    async def make_raw_header(self, b):
        pbh = b.get('previousblockhash')
        if pbh is None:
            pbh = '0' * 64
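        # Reassemble the standard 80-byte header: version (4 bytes, LE),
        # previous block hash (32), merkle root (32), timestamp (4),
        # bits (4) and nonce (4).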
        return b''.join([
            pack('<L', b.get('version')),
            hex_str_to_hash(pbh),
            hex_str_to_hash(b.get('merkleroot')),
            pack('<L', self.timestamp_safe(b['time'])),
            pack('<L', int(b.get('bits'), 16)),
            pack('<L', int(b.get('nonce')))
        ])

    async def make_raw_block(self, b):
        """Construct a raw block."""

        header = await self.make_raw_header(b)

        transactions = []
        if b.get('height') > 0:
            transactions = await self.getrawtransactions(b.get('tx'), False)

        raw_block = header
        num_txs = len(transactions)
        if num_txs > 0:
            raw_block += pack_varint(num_txs)
            raw_block += b''.join(transactions)
        else:
            raw_block += b'\x00'

        return raw_block

    def timestamp_safe(self, t):
        if isinstance(t, int):
            return t
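        # Some legacy daemons report block times as strings, e.g.
        # '2014-02-23 11:00:00 GMT' -> 1393153200 (zone-name parsing is
        # platform-dependent).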
        return timegm(strptime(t, "%Y-%m-%d %H:%M:%S %Z"))

class DecredDaemon(Daemon):
    async def raw_blocks(self, hex_hashes):
        """Return the raw binary blocks with the given hex hashes."""

        params_iterable = ((h, False) for h in hex_hashes)
        blocks = await self._send_vector('getblock', params_iterable)

        raw_blocks = []
        valid_tx_tree = {}
        for block in blocks:
            # Convert to bytes from hex
            raw_block = hex_to_bytes(block)
            raw_blocks.append(raw_block)
            # Check if previous block is valid
            prev = self.prev_hex_hash(raw_block)
            votebits = unpack_le_uint16_from(raw_block[100:102])[0]
            valid_tx_tree[prev] = self.is_valid_tx_tree(votebits)

        processed_raw_blocks = []
        for hash, raw_block in zip(hex_hashes, raw_blocks):
            if hash in valid_tx_tree:
                is_valid = valid_tx_tree[hash]
            else:
                # Do something complicated to figure out if this block is valid
                header = await self._send_single('getblockheader', (hash, ))
                if 'nextblockhash' not in header:
                    raise DaemonError(f'Could not find next block for {hash}')
                next_hash = header['nextblockhash']
                next_header = await self._send_single('getblockheader',
                                                      (next_hash, ))
                is_valid = self.is_valid_tx_tree(next_header['votebits'])

            if is_valid:
                processed_raw_blocks.append(raw_block)
            else:
                # If this block is invalid remove the normal transactions
                self.logger.info(f'block {hash} is invalidated')
                processed_raw_blocks.append(self.strip_tx_tree(raw_block))

        return processed_raw_blocks

    @staticmethod
    def prev_hex_hash(raw_block):
        return hash_to_hex_str(raw_block[4:36])

    @staticmethod
    def is_valid_tx_tree(votebits):
        # Check if previous block was invalidated.
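        # Bit 0 of votebits is the "block valid" vote: a set bit approves
        # the previous block's regular transaction tree, a cleared bit
        # invalidates it.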
        return bool(votebits & (1 << 0) != 0)

    def strip_tx_tree(self, raw_block):
        c = self.coin
        assert issubclass(c.DESERIALIZER, DeserializerDecred)
        d = c.DESERIALIZER(raw_block, start=c.BASIC_HEADER_SIZE)
        d.read_tx_tree()  # Skip normal transactions
        # Create a fake block without any normal transactions
        return raw_block[:c.BASIC_HEADER_SIZE] + b'\x00' + raw_block[d.cursor:]

    async def height(self):
        height = await super().height()
        if height > 0:
            # Lie about the daemon height as the current tip can be invalidated
            height -= 1
            self._height = height
        return height

    async def mempool_hashes(self):
        mempool = await super().mempool_hashes()
        # Add current tip transactions to the 'fake' mempool.
        real_height = await self._send_single('getblockcount')
        tip_hash = await self._send_single('getblockhash', (real_height,))
        tip = await self.deserialised_block(tip_hash)
        # Add normal transactions except coinbase
        mempool += tip['tx'][1:]
        # Add stake transactions if applicable
        mempool += tip.get('stx', [])
        return mempool

    def client_session(self):
        # FIXME allow self signed certificates
        connector = aiohttp.TCPConnector(verify_ssl=False)
        return aiohttp.ClientSession(connector=connector)

class PreLegacyRPCDaemon(LegacyRPCDaemon):
    """Handles connections to a daemon at the given URL.

    This class is useful for daemons that don't have the new 'getblock'
    RPC call that returns the block in hex, and that need the False
    parameter passed to 'getblock'."""

    async def deserialised_block(self, hex_hash):
        """Return the deserialised block with the given hex hash."""
        return await self._send_single('getblock', (hex_hash, False))