diff --git a/lbry/.gitignore b/lbry/.gitignore
index cfc004a50..02efd9dbf 100644
--- a/lbry/.gitignore
+++ b/lbry/.gitignore
@@ -12,3 +12,5 @@ _trial_temp/
 
 /tests/integration/files
 /tests/.coverage.*
+
+/lbry/wallet/bin
diff --git a/torba/MANIFEST.in b/lbry/MANIFEST.in
similarity index 100%
rename from torba/MANIFEST.in
rename to lbry/MANIFEST.in
diff --git a/torba/tests/client_tests/__init__.py b/lbry/lbry/wallet/client/__init__.py
similarity index 100%
rename from torba/tests/client_tests/__init__.py
rename to lbry/lbry/wallet/client/__init__.py
diff --git a/torba/torba/client/baseaccount.py b/lbry/lbry/wallet/client/baseaccount.py
similarity index 100%
rename from torba/torba/client/baseaccount.py
rename to lbry/lbry/wallet/client/baseaccount.py
diff --git a/torba/torba/client/basedatabase.py b/lbry/lbry/wallet/client/basedatabase.py
similarity index 100%
rename from torba/torba/client/basedatabase.py
rename to lbry/lbry/wallet/client/basedatabase.py
diff --git a/torba/torba/client/baseheader.py b/lbry/lbry/wallet/client/baseheader.py
similarity index 100%
rename from torba/torba/client/baseheader.py
rename to lbry/lbry/wallet/client/baseheader.py
diff --git a/torba/torba/client/baseledger.py b/lbry/lbry/wallet/client/baseledger.py
similarity index 100%
rename from torba/torba/client/baseledger.py
rename to lbry/lbry/wallet/client/baseledger.py
diff --git a/torba/torba/client/basemanager.py b/lbry/lbry/wallet/client/basemanager.py
similarity index 100%
rename from torba/torba/client/basemanager.py
rename to lbry/lbry/wallet/client/basemanager.py
diff --git a/torba/torba/client/basenetwork.py b/lbry/lbry/wallet/client/basenetwork.py
similarity index 100%
rename from torba/torba/client/basenetwork.py
rename to lbry/lbry/wallet/client/basenetwork.py
diff --git a/torba/torba/client/basescript.py b/lbry/lbry/wallet/client/basescript.py
similarity index 100%
rename from torba/torba/client/basescript.py
rename to lbry/lbry/wallet/client/basescript.py
diff --git a/torba/torba/client/basetransaction.py b/lbry/lbry/wallet/client/basetransaction.py
similarity index 100%
rename from torba/torba/client/basetransaction.py
rename to lbry/lbry/wallet/client/basetransaction.py
diff --git a/torba/torba/client/bcd_data_stream.py b/lbry/lbry/wallet/client/bcd_data_stream.py
similarity index 100%
rename from torba/torba/client/bcd_data_stream.py
rename to lbry/lbry/wallet/client/bcd_data_stream.py
diff --git a/torba/torba/client/bip32.py b/lbry/lbry/wallet/client/bip32.py
similarity index 100%
rename from torba/torba/client/bip32.py
rename to lbry/lbry/wallet/client/bip32.py
diff --git a/torba/torba/client/cli.py b/lbry/lbry/wallet/client/cli.py
similarity index 100%
rename from torba/torba/client/cli.py
rename to lbry/lbry/wallet/client/cli.py
diff --git a/torba/torba/client/coinselection.py b/lbry/lbry/wallet/client/coinselection.py
similarity index 100%
rename from torba/torba/client/coinselection.py
rename to lbry/lbry/wallet/client/coinselection.py
diff --git a/torba/torba/client/constants.py b/lbry/lbry/wallet/client/constants.py
similarity index 100%
rename from torba/torba/client/constants.py
rename to lbry/lbry/wallet/client/constants.py
diff --git a/torba/torba/client/errors.py b/lbry/lbry/wallet/client/errors.py
similarity index 100%
rename from torba/torba/client/errors.py
rename to lbry/lbry/wallet/client/errors.py
diff --git a/torba/torba/client/hash.py b/lbry/lbry/wallet/client/hash.py
similarity index 100%
rename from torba/torba/client/hash.py
rename to lbry/lbry/wallet/client/hash.py
diff --git a/torba/torba/client/mnemonic.py b/lbry/lbry/wallet/client/mnemonic.py
similarity index 100%
rename from torba/torba/client/mnemonic.py
rename to lbry/lbry/wallet/client/mnemonic.py
diff --git a/torba/torba/client/util.py b/lbry/lbry/wallet/client/util.py
similarity index 100%
rename from torba/torba/client/util.py
rename to lbry/lbry/wallet/client/util.py
diff --git a/torba/torba/client/wallet.py b/lbry/lbry/wallet/client/wallet.py
similarity index 100%
rename from torba/torba/client/wallet.py
rename to lbry/lbry/wallet/client/wallet.py
diff --git a/torba/tests/client_tests/integration/__init__.py b/lbry/lbry/wallet/client/words/__init__.py
similarity index 100%
rename from torba/tests/client_tests/integration/__init__.py
rename to lbry/lbry/wallet/client/words/__init__.py
diff --git a/torba/torba/client/words/chinese_simplified.py b/lbry/lbry/wallet/client/words/chinese_simplified.py
similarity index 100%
rename from torba/torba/client/words/chinese_simplified.py
rename to lbry/lbry/wallet/client/words/chinese_simplified.py
diff --git a/torba/torba/client/words/english.py b/lbry/lbry/wallet/client/words/english.py
similarity index 100%
rename from torba/torba/client/words/english.py
rename to lbry/lbry/wallet/client/words/english.py
diff --git a/torba/torba/client/words/japanese.py b/lbry/lbry/wallet/client/words/japanese.py
similarity index 100%
rename from torba/torba/client/words/japanese.py
rename to lbry/lbry/wallet/client/words/japanese.py
diff --git a/torba/torba/client/words/portuguese.py b/lbry/lbry/wallet/client/words/portuguese.py
similarity index 100%
rename from torba/torba/client/words/portuguese.py
rename to lbry/lbry/wallet/client/words/portuguese.py
diff --git a/torba/torba/client/words/spanish.py b/lbry/lbry/wallet/client/words/spanish.py
similarity index 100%
rename from torba/torba/client/words/spanish.py
rename to lbry/lbry/wallet/client/words/spanish.py
diff --git a/torba/torba/coin/__init__.py b/lbry/lbry/wallet/coin/__init__.py
similarity index 100%
rename from torba/torba/coin/__init__.py
rename to lbry/lbry/wallet/coin/__init__.py
diff --git a/torba/torba/coin/bitcoincash.py b/lbry/lbry/wallet/coin/bitcoincash.py
similarity index 100%
rename from torba/torba/coin/bitcoincash.py
rename to lbry/lbry/wallet/coin/bitcoincash.py
diff --git a/torba/torba/coin/bitcoinsegwit.py b/lbry/lbry/wallet/coin/bitcoinsegwit.py
similarity index 100%
rename from torba/torba/coin/bitcoinsegwit.py
rename to lbry/lbry/wallet/coin/bitcoinsegwit.py
diff --git a/torba/torba/orchstr8/__init__.py b/lbry/lbry/wallet/orchstr8/__init__.py
similarity index 100%
rename from torba/torba/orchstr8/__init__.py
rename to lbry/lbry/wallet/orchstr8/__init__.py
diff --git a/torba/torba/orchstr8/cli.py b/lbry/lbry/wallet/orchstr8/cli.py
similarity index 100%
rename from torba/torba/orchstr8/cli.py
rename to lbry/lbry/wallet/orchstr8/cli.py
diff --git a/torba/torba/orchstr8/node.py b/lbry/lbry/wallet/orchstr8/node.py
similarity index 100%
rename from torba/torba/orchstr8/node.py
rename to lbry/lbry/wallet/orchstr8/node.py
diff --git a/torba/torba/orchstr8/service.py b/lbry/lbry/wallet/orchstr8/service.py
similarity index 100%
rename from torba/torba/orchstr8/service.py
rename to lbry/lbry/wallet/orchstr8/service.py
diff --git a/torba/torba/rpc/__init__.py b/lbry/lbry/wallet/rpc/__init__.py
similarity index 100%
rename from torba/torba/rpc/__init__.py
rename to lbry/lbry/wallet/rpc/__init__.py
diff --git a/torba/torba/rpc/framing.py b/lbry/lbry/wallet/rpc/framing.py
similarity index 100%
rename from torba/torba/rpc/framing.py
rename to lbry/lbry/wallet/rpc/framing.py
diff --git a/torba/torba/rpc/jsonrpc.py b/lbry/lbry/wallet/rpc/jsonrpc.py
similarity index 100%
rename from torba/torba/rpc/jsonrpc.py
rename to lbry/lbry/wallet/rpc/jsonrpc.py
diff --git a/torba/torba/rpc/session.py b/lbry/lbry/wallet/rpc/session.py
similarity index 100%
rename from torba/torba/rpc/session.py
rename to lbry/lbry/wallet/rpc/session.py
diff --git a/torba/torba/rpc/socks.py b/lbry/lbry/wallet/rpc/socks.py
similarity index 100%
rename from torba/torba/rpc/socks.py
rename to lbry/lbry/wallet/rpc/socks.py
diff --git a/torba/torba/rpc/util.py b/lbry/lbry/wallet/rpc/util.py
similarity index 100%
rename from torba/torba/rpc/util.py
rename to lbry/lbry/wallet/rpc/util.py
diff --git a/lbry/lbry/wallet/server/block_processor.py b/lbry/lbry/wallet/server/block_processor.py
index d86eda514..703dcbf25 100644
--- a/lbry/lbry/wallet/server/block_processor.py
+++ b/lbry/lbry/wallet/server/block_processor.py
@@ -1,10 +1,711 @@
 import time
 
-from torba.server.block_processor import BlockProcessor
-
 from lbry.schema.claim import Claim
 from lbry.wallet.server.db.writer import SQLDB
+import asyncio
+from struct import pack, unpack
+
+import torba
+from torba.server.daemon import DaemonError
+from torba.server.hash import hash_to_hex_str, HASHX_LEN
+from torba.server.util import chunks, class_logger
+from torba.server.db import FlushData
+
+
+class Prefetcher:
+    """Prefetches blocks (in the forward direction only)."""
+
+    def __init__(self, daemon, coin, blocks_event):
+        self.logger = class_logger(__name__, self.__class__.__name__)
+        self.daemon = daemon
+        self.coin = coin
+        self.blocks_event = blocks_event
+        self.blocks = []
+        self.caught_up = False
+        # Access to fetched_height should be protected by the semaphore
+        self.fetched_height = None
+        self.semaphore = asyncio.Semaphore()
+        self.refill_event = asyncio.Event()
+        # The prefetched block cache size.  The min cache size has
+        # little effect on sync time.
+        self.cache_size = 0
+        self.min_cache_size = 10 * 1024 * 1024
+        # This makes the first fetch be 10 blocks
+        self.ave_size = self.min_cache_size // 10
+        self.polling_delay = 5
+
+    async def main_loop(self, bp_height):
+        """Loop forever polling for more blocks."""
+        await self.reset_height(bp_height)
+        while True:
+            try:
+                # Sleep a while if there is nothing to prefetch
+                await self.refill_event.wait()
+                if not await self._prefetch_blocks():
+                    await asyncio.sleep(self.polling_delay)
+            except DaemonError as e:
+                self.logger.info(f'ignoring daemon error: {e}')
+
+    def get_prefetched_blocks(self):
+        """Called by block processor when it is processing queued blocks."""
+        blocks = self.blocks
+        self.blocks = []
+        self.cache_size = 0
+        self.refill_event.set()
+        return blocks
+
+    async def reset_height(self, height):
+        """Reset to prefetch blocks from the block processor's height.
+
+        Used in blockchain reorganisations.  This coroutine can be
+        called asynchronously to the _prefetch_blocks coroutine so we
+        must synchronize with a semaphore.
+ """ + async with self.semaphore: + self.blocks.clear() + self.cache_size = 0 + self.fetched_height = height + self.refill_event.set() + + daemon_height = await self.daemon.height() + behind = daemon_height - height + if behind > 0: + self.logger.info(f'catching up to daemon height {daemon_height:,d} ' + f'({behind:,d} blocks behind)') + else: + self.logger.info(f'caught up to daemon height {daemon_height:,d}') + + async def _prefetch_blocks(self): + """Prefetch some blocks and put them on the queue. + + Repeats until the queue is full or caught up. + """ + daemon = self.daemon + daemon_height = await daemon.height() + async with self.semaphore: + while self.cache_size < self.min_cache_size: + # Try and catch up all blocks but limit to room in cache. + # Constrain fetch count to between 0 and 500 regardless; + # testnet can be lumpy. + cache_room = self.min_cache_size // self.ave_size + count = min(daemon_height - self.fetched_height, cache_room) + count = min(500, max(count, 0)) + if not count: + self.caught_up = True + return False + + first = self.fetched_height + 1 + hex_hashes = await daemon.block_hex_hashes(first, count) + if self.caught_up: + self.logger.info('new block height {:,d} hash {}' + .format(first + count-1, hex_hashes[-1])) + blocks = await daemon.raw_blocks(hex_hashes) + + assert count == len(blocks) + + # Special handling for genesis block + if first == 0: + blocks[0] = self.coin.genesis_block(blocks[0]) + self.logger.info(f'verified genesis block with hash {hex_hashes[0]}') + + # Update our recent average block size estimate + size = sum(len(block) for block in blocks) + if count >= 10: + self.ave_size = size // count + else: + self.ave_size = (size + (10 - count) * self.ave_size) // 10 + + self.blocks.extend(blocks) + self.cache_size += size + self.fetched_height += count + self.blocks_event.set() + + self.refill_event.clear() + return True + + +class ChainError(Exception): + """Raised on error processing blocks.""" + + +class BlockProcessor: + """Process blocks and update the DB state to match. + + Employ a prefetcher to prefetch blocks in batches for processing. + Coordinate backing up in case of chain reorganisations. + """ + + def __init__(self, env, db, daemon, notifications): + self.env = env + self.db = db + self.daemon = daemon + self.notifications = notifications + + self.coin = env.coin + self.blocks_event = asyncio.Event() + self.prefetcher = Prefetcher(daemon, env.coin, self.blocks_event) + self.logger = class_logger(__name__, self.__class__.__name__) + + # Meta + self.next_cache_check = 0 + self.touched = set() + self.reorg_count = 0 + + # Caches of unflushed items. + self.headers = [] + self.tx_hashes = [] + self.undo_infos = [] + + # UTXO cache + self.utxo_cache = {} + self.db_deletes = [] + + # If the lock is successfully acquired, in-memory chain state + # is consistent with self.height + self.state_lock = asyncio.Lock() + + async def run_in_thread_with_lock(self, func, *args): + # Run in a thread to prevent blocking. Shielded so that + # cancellations from shutdown don't lose work - when the task + # completes the data will be flushed and then we shut down. + # Take the state lock to be certain in-memory state is + # consistent and not being updated elsewhere. 
+        async def run_in_thread_locked():
+            async with self.state_lock:
+                return await asyncio.get_event_loop().run_in_executor(None, func, *args)
+        return await asyncio.shield(run_in_thread_locked())
+
+    async def check_and_advance_blocks(self, raw_blocks):
+        """Process the list of raw blocks passed.  Detects and handles
+        reorgs.
+        """
+        if not raw_blocks:
+            return
+        first = self.height + 1
+        blocks = [self.coin.block(raw_block, first + n)
+                  for n, raw_block in enumerate(raw_blocks)]
+        headers = [block.header for block in blocks]
+        hprevs = [self.coin.header_prevhash(h) for h in headers]
+        chain = [self.tip] + [self.coin.header_hash(h) for h in headers[:-1]]
+
+        if hprevs == chain:
+            start = time.time()
+            await self.run_in_thread_with_lock(self.advance_blocks, blocks)
+            await self._maybe_flush()
+            if not self.db.first_sync:
+                s = '' if len(blocks) == 1 else 's'
+                self.logger.info('processed {:,d} block{} in {:.1f}s'
+                                 .format(len(blocks), s,
+                                         time.time() - start))
+            if self._caught_up_event.is_set():
+                await self.notifications.on_block(self.touched, self.height)
+            self.touched = set()
+        elif hprevs[0] != chain[0]:
+            await self.reorg_chain()
+        else:
+            # It is probably possible but extremely rare that what
+            # bitcoind returns doesn't form a chain because it
+            # reorg-ed the chain as it was processing the batched
+            # block hash requests.  Should this happen it's simplest
+            # just to reset the prefetcher and try again.
+            self.logger.warning('daemon blocks do not form a chain; '
+                                'resetting the prefetcher')
+            await self.prefetcher.reset_height(self.height)
+
+    async def reorg_chain(self, count=None):
+        """Handle a chain reorganisation.
+
+        Count is the number of blocks to simulate a reorg, or None for
+        a real reorg."""
+        if count is None:
+            self.logger.info('chain reorg detected')
+        else:
+            self.logger.info(f'faking a reorg of {count:,d} blocks')
+        await self.flush(True)
+
+        async def get_raw_blocks(last_height, hex_hashes):
+            heights = range(last_height, last_height - len(hex_hashes), -1)
+            try:
+                blocks = [self.db.read_raw_block(height) for height in heights]
+                self.logger.info(f'read {len(blocks)} blocks from disk')
+                return blocks
+            except FileNotFoundError:
+                return await self.daemon.raw_blocks(hex_hashes)
+
+        def flush_backup():
+            # self.touched can include other addresses which is
+            # harmless, but remove None.
+            self.touched.discard(None)
+            self.db.flush_backup(self.flush_data(), self.touched)
+
+        start, last, hashes = await self.reorg_hashes(count)
+        # Reverse and convert to hex strings.
+        hashes = [hash_to_hex_str(hash) for hash in reversed(hashes)]
+        for hex_hashes in chunks(hashes, 50):
+            raw_blocks = await get_raw_blocks(last, hex_hashes)
+            await self.run_in_thread_with_lock(self.backup_blocks, raw_blocks)
+            await self.run_in_thread_with_lock(flush_backup)
+            last -= len(raw_blocks)
+        await self.prefetcher.reset_height(self.height)
+
+    async def reorg_hashes(self, count):
+        """Return a tuple (start, last, hashes) of blocks to back up during a
+        reorg.
+
+        The hashes are returned in order of increasing height.  Start
+        is the height of the first hash, last of the last.
+        """
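+        # calc_reorg_range does the actual fork-point search against the
+        # daemon; this method just logs and loads the block hashes.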
+ """ + start, count = await self.calc_reorg_range(count) + last = start + count - 1 + s = '' if count == 1 else 's' + self.logger.info(f'chain was reorganised replacing {count:,d} ' + f'block{s} at heights {start:,d}-{last:,d}') + + return start, last, await self.db.fs_block_hashes(start, count) + + async def calc_reorg_range(self, count): + """Calculate the reorg range""" + + def diff_pos(hashes1, hashes2): + """Returns the index of the first difference in the hash lists. + If both lists match returns their length.""" + for n, (hash1, hash2) in enumerate(zip(hashes1, hashes2)): + if hash1 != hash2: + return n + return len(hashes) + + if count is None: + # A real reorg + start = self.height - 1 + count = 1 + while start > 0: + hashes = await self.db.fs_block_hashes(start, count) + hex_hashes = [hash_to_hex_str(hash) for hash in hashes] + d_hex_hashes = await self.daemon.block_hex_hashes(start, count) + n = diff_pos(hex_hashes, d_hex_hashes) + if n > 0: + start += n + break + count = min(count * 2, start) + start -= count + + count = (self.height - start) + 1 + else: + start = (self.height - count) + 1 + + return start, count + + def estimate_txs_remaining(self): + # Try to estimate how many txs there are to go + daemon_height = self.daemon.cached_height() + coin = self.coin + tail_count = daemon_height - max(self.height, coin.TX_COUNT_HEIGHT) + # Damp the initial enthusiasm + realism = max(2.0 - 0.9 * self.height / coin.TX_COUNT_HEIGHT, 1.0) + return (tail_count * coin.TX_PER_BLOCK + + max(coin.TX_COUNT - self.tx_count, 0)) * realism + + # - Flushing + def flush_data(self): + """The data for a flush. The lock must be taken.""" + assert self.state_lock.locked() + return FlushData(self.height, self.tx_count, self.headers, + self.tx_hashes, self.undo_infos, self.utxo_cache, + self.db_deletes, self.tip) + + async def flush(self, flush_utxos): + def flush(): + self.db.flush_dbs(self.flush_data(), flush_utxos, + self.estimate_txs_remaining) + await self.run_in_thread_with_lock(flush) + + async def _maybe_flush(self): + # If caught up, flush everything as client queries are + # performed on the DB. + if self._caught_up_event.is_set(): + await self.flush(True) + elif time.time() > self.next_cache_check: + flush_arg = self.check_cache_size() + if flush_arg is not None: + await self.flush(flush_arg) + self.next_cache_check = time.time() + 30 + + def check_cache_size(self): + """Flush a cache if it gets too big.""" + # Good average estimates based on traversal of subobjects and + # requesting size from Python (see deep_getsizeof). + one_MB = 1000*1000 + utxo_cache_size = len(self.utxo_cache) * 205 + db_deletes_size = len(self.db_deletes) * 57 + hist_cache_size = self.db.history.unflushed_memsize() + # Roughly ntxs * 32 + nblocks * 42 + tx_hash_size = ((self.tx_count - self.db.fs_tx_count) * 32 + + (self.height - self.db.fs_height) * 42) + utxo_MB = (db_deletes_size + utxo_cache_size) // one_MB + hist_MB = (hist_cache_size + tx_hash_size) // one_MB + + self.logger.info('our height: {:,d} daemon: {:,d} ' + 'UTXOs {:,d}MB hist {:,d}MB' + .format(self.height, self.daemon.cached_height(), + utxo_MB, hist_MB)) + + # Flush history if it takes up over 20% of cache memory. + # Flush UTXOs once they take up 80% of cache memory. + cache_MB = self.env.cache_MB + if utxo_MB + hist_MB >= cache_MB or hist_MB >= cache_MB // 5: + return utxo_MB >= cache_MB * 4 // 5 + return None + + def advance_blocks(self, blocks): + """Synchronously advance the blocks. 
+        min_height = self.db.min_undo_height(self.daemon.cached_height())
+        height = self.height
+
+        for block in blocks:
+            height += 1
+            undo_info = self.advance_txs(
+                height, block.transactions, self.coin.electrum_header(block.header, height)
+            )
+            if height >= min_height:
+                self.undo_infos.append((undo_info, height))
+                self.db.write_raw_block(block.raw, height)
+
+        headers = [block.header for block in blocks]
+        self.height = height
+        self.headers.extend(headers)
+        self.tip = self.coin.header_hash(headers[-1])
+
+    def advance_txs(self, height, txs, header):
+        self.tx_hashes.append(b''.join(tx_hash for tx, tx_hash in txs))
+
+        # Use local vars for speed in the loops
+        undo_info = []
+        tx_num = self.tx_count
+        script_hashX = self.coin.hashX_from_script
+        s_pack = pack
+        put_utxo = self.utxo_cache.__setitem__
+        spend_utxo = self.spend_utxo
+        undo_info_append = undo_info.append
+        update_touched = self.touched.update
+        hashXs_by_tx = []
+        append_hashXs = hashXs_by_tx.append
+
+        for tx, tx_hash in txs:
+            hashXs = []
+            append_hashX = hashXs.append
+            tx_numb = s_pack('<I', tx_num)
+
+            # Spend the inputs
+            for txin in tx.inputs:
+                if txin.is_generation():
+                    continue
+                cache_value = spend_utxo(txin.prev_hash, txin.prev_idx)
+                undo_info_append(cache_value)
+                append_hashX(cache_value[:-12])
+
+            # Add the new UTXOs
+            for idx, txout in enumerate(tx.outputs):
+                # Get the hashX.  Ignore unspendable outputs
+                hashX = script_hashX(txout.pk_script)
+                if hashX:
+                    append_hashX(hashX)
+                    put_utxo(tx_hash + s_pack('<H', idx),
+                             hashX + tx_numb + s_pack('<Q', txout.value))
+
+            append_hashXs(hashXs)
+            update_touched(hashXs)
+            tx_num += 1
+
+        self.db.history.add_unflushed(hashXs_by_tx, self.tx_count)
+        self.tx_count = tx_num
+        self.db.tx_counts.append(tx_num)
+
+        return undo_info
+
+    def backup_blocks(self, raw_blocks):
+        """Backup the raw blocks and flush.
+
+        The blocks should be in order of decreasing height, starting at
+        self.height.  A flush is performed once the blocks are backed up.
+        """
+        self.db.assert_flushed(self.flush_data())
+        assert self.height >= len(raw_blocks)
+
+        coin = self.coin
+        for raw_block in raw_blocks:
+            # Check and update self.tip
+            block = coin.block(raw_block, self.height)
+            header_hash = coin.header_hash(block.header)
+            if header_hash != self.tip:
+                raise ChainError('backup block {} not tip {} at height {:,d}'
+                                 .format(hash_to_hex_str(header_hash),
+                                         hash_to_hex_str(self.tip),
+                                         self.height))
+            self.tip = coin.header_prevhash(block.header)
+            self.backup_txs(block.transactions)
+            self.height -= 1
+            self.db.tx_counts.pop()
+
+        self.logger.info(f'backed up to height {self.height:,d}')
+
+    def backup_txs(self, txs):
+        # Prevout values, in order down the block (coinbase first if present)
+        # undo_info is in reverse block order
+        undo_info = self.db.read_undo_info(self.height)
+        if undo_info is None:
+            raise ChainError(f'no undo information found for height {self.height:,d}')
+        n = len(undo_info)
+
+        # Use local vars for speed in the loops
+        s_pack = pack
+        put_utxo = self.utxo_cache.__setitem__
+        spend_utxo = self.spend_utxo
+        script_hashX = self.coin.hashX_from_script
+        touched = self.touched
+        undo_entry_len = 12 + HASHX_LEN
+
+        for tx, tx_hash in reversed(txs):
+            for idx, txout in enumerate(tx.outputs):
+                # Spend the TX outputs.  Be careful with unspendable
+                # outputs - we didn't save those in the first place.
+                hashX = script_hashX(txout.pk_script)
+                if hashX:
+                    cache_value = spend_utxo(tx_hash, idx)
+                    touched.add(cache_value[:-12])
+
+            # Restore the inputs
+            for txin in reversed(tx.inputs):
+                if txin.is_generation():
+                    continue
+                n -= undo_entry_len
+                undo_item = undo_info[n:n + undo_entry_len]
+                put_utxo(txin.prev_hash + s_pack('<H', txin.prev_idx),
+                         undo_item)
+                touched.add(undo_item[:-12])
+
+        assert n == 0
+        self.tx_count -= len(txs)
+
+    def spend_utxo(self, tx_hash, tx_idx):
+        """Spend a UTXO and return the 33-byte value.
+
+        If the UTXO is not in the cache it must be on disk.  We store
+        all UTXOs so not finding one indicates a logic error or DB
+        corruption.
+        """
+        # Fast track is it being in the cache
+        idx_packed = pack('<H', tx_idx)
+        cache_value = self.utxo_cache.pop(tx_hash + idx_packed, None)
+        if cache_value:
+            return cache_value
+
+        # Spend it from the DB.
+        # Key: b'h' + compressed_tx_hash + tx_idx + tx_num
+        # Value: hashX
+        prefix = b'h' + tx_hash[:4] + idx_packed
+        candidates = {db_key: hashX for db_key, hashX
+                      in self.db.utxo_db.iterator(prefix=prefix)}
+
+        for hdb_key, hashX in candidates.items():
+            tx_num_packed = hdb_key[-4:]
+
+            if len(candidates) > 1:
+                tx_num, = unpack('<I', tx_num_packed)
+                hash, height = self.db.fs_tx_hash(tx_num)
+                if hash != tx_hash:
+                    assert hash is not None  # Should always be found
+                    continue
+
+            # Key: b'u' + address_hashX + tx_idx + tx_num
+            # Value: the UTXO value as a 64-bit unsigned integer
+            udb_key = b'u' + hashX + hdb_key[-6:]
+            utxo_value_packed = self.db.utxo_db.get(udb_key)
+            if utxo_value_packed:
+                # Remove both entries for this UTXO
+                self.db_deletes.append(hdb_key)
+                self.db_deletes.append(udb_key)
+                return hashX + tx_num_packed + utxo_value_packed
+
+        raise ChainError('UTXO {} / {:,d} not found in "h" table'
+                         .format(hash_to_hex_str(tx_hash), tx_idx))
+
+    async def _process_prefetched_blocks(self):
+        """Loop forever processing blocks as they arrive."""
+        while True:
+            if self.height == self.daemon.cached_height():
+                if not self._caught_up_event.is_set():
+                    await self._first_caught_up()
+                    self._caught_up_event.set()
+            await self.blocks_event.wait()
+            self.blocks_event.clear()
+            if self.reorg_count:
+                await self.reorg_chain(self.reorg_count)
+                self.reorg_count = 0
+            else:
+                blocks = self.prefetcher.get_prefetched_blocks()
+                await self.check_and_advance_blocks(blocks)
+
+    async def _first_caught_up(self):
+        self.logger.info(f'caught up to height {self.height}')
+        # Flush everything but with first_sync->False state.
+        first_sync = self.db.first_sync
+        self.db.first_sync = False
+        await self.flush(True)
+        if first_sync:
+            self.logger.info(f'{torba.__version__} synced to '
+                             f'height {self.height:,d}')
+        # Reopen for serving
+        await self.db.open_for_serving()
+
+    async def _first_open_dbs(self):
+        await self.db.open_for_sync()
+        self.height = self.db.db_height
+        self.tip = self.db.db_tip
+        self.tx_count = self.db.db_tx_count
+
+    # --- External API
+
+    async def fetch_and_process_blocks(self, caught_up_event):
+        """Fetch, process and index blocks from the daemon.
+
+        Sets caught_up_event when first caught up.  Flushes to disk
+        and shuts down cleanly if cancelled.
+
+        This is mainly because if, during initial sync ElectrumX is
+        asked to shut down when a large number of blocks have been
+        processed but not written to disk, it should write those to
+        disk before exiting, as otherwise a significant amount of work
+        could be lost.
+        """
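+        # caught_up_event is owned by the caller; it is also consulted
+        # by check_and_advance_blocks and _maybe_flush above.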
+        self._caught_up_event = caught_up_event
+        try:
+            await self._first_open_dbs()
+            await asyncio.wait([
+                self.prefetcher.main_loop(self.height),
+                self._process_prefetched_blocks()
+            ])
+        except asyncio.CancelledError:
+            raise
+        except:
+            self.logger.exception("Block processing failed!")
+            raise
+        finally:
+            # Shut down block processing
+            self.logger.info('flushing to DB for a clean shutdown...')
+            await self.flush(True)
+            self.db.close()
+
+    def force_chain_reorg(self, count):
+        """Force a reorg of the given number of blocks.
+
+        Returns True if a reorg is queued, false if not caught up.
+        """
+        if self._caught_up_event.is_set():
+            self.reorg_count = count
+            self.blocks_event.set()
+            return True
+        return False
+
+
+class DecredBlockProcessor(BlockProcessor):
+    async def calc_reorg_range(self, count):
+        start, count = await super().calc_reorg_range(count)
+        if start > 0:
+            # A reorg in Decred can invalidate the previous block
+            start -= 1
+            count += 1
+        return start, count
+
+
+class NamecoinBlockProcessor(BlockProcessor):
+    def advance_txs(self, txs):
+        result = super().advance_txs(txs)
+
+        tx_num = self.tx_count - len(txs)
+        script_name_hashX = self.coin.name_hashX_from_script
+        update_touched = self.touched.update
+        hashXs_by_tx = []
+        append_hashXs = hashXs_by_tx.append
+
+        for tx, tx_hash in txs:
+            hashXs = []
+            append_hashX = hashXs.append
+
+            # Add the new UTXOs and associate them with the name script
+            for idx, txout in enumerate(tx.outputs):
+                # Get the hashX of the name script.  Ignore non-name scripts.
+                hashX = script_name_hashX(txout.pk_script)
+                if hashX:
+                    append_hashX(hashX)
+
+            append_hashXs(hashXs)
+            update_touched(hashXs)
+            tx_num += 1
+
+        self.db.history.add_unflushed(hashXs_by_tx, self.tx_count - len(txs))
+
+        return result
+
 
 class Timer:
diff --git a/torba/torba/server/cli.py b/lbry/lbry/wallet/server/cli.py
similarity index 100%
rename from torba/torba/server/cli.py
rename to lbry/lbry/wallet/server/cli.py
diff --git a/lbry/lbry/wallet/server/coin.py b/lbry/lbry/wallet/server/coin.py
index eb136dc03..d709477f5 100644
--- a/lbry/lbry/wallet/server/coin.py
+++ b/lbry/lbry/wallet/server/coin.py
@@ -4,7 +4,6 @@ from hashlib import sha256
 from torba.server.script import ScriptPubKey, OpCodes
 from torba.server.util import cachedproperty
 from torba.server.hash import hash_to_hex_str, HASHX_LEN
-from torba.server.coins import Coin, CoinError
 from torba.server.tx import DeserializerSegWit
 
 from lbry.wallet.script import OutputScript
@@ -12,6 +11,241 @@ from .session import LBRYElectrumX, LBRYSessionManager
 from .block_processor import LBRYBlockProcessor
 from .daemon import LBCDaemon
 from .db.writer import LBRYDB
+from collections import namedtuple
+
+import re
+import struct
+from decimal import Decimal
+from hashlib import sha256
+from functools import partial
+import base64
+from typing import Type, List
+
+import torba.server.util as util
+from torba.server.hash import Base58, hash160, double_sha256, hash_to_hex_str
+from torba.server.hash import HASHX_LEN, hex_str_to_hash
+from torba.server.script import ScriptPubKey, OpCodes
+import torba.server.tx as lib_tx
+import torba.server.block_processor as block_proc
+from torba.server.db import DB
+import torba.server.daemon as daemon
+from torba.server.session import ElectrumX, DashElectrumX, SessionManager
+
+
+Block = namedtuple("Block", "raw header transactions")
+OP_RETURN = OpCodes.OP_RETURN
+
+
+class CoinError(Exception):
+    """Exception raised for coin-related errors."""
+
+
+class Coin:
+    """Base class of coin hierarchy."""
+
+    REORG_LIMIT = 200
+    # Not sure if these are coin-specific
+    RPC_URL_REGEX = re.compile('.+@(\\[[0-9a-fA-F:]+\\]|[^:]+)(:[0-9]+)?')
+    VALUE_PER_COIN = 100000000
+    CHUNK_SIZE = 2016
+    BASIC_HEADER_SIZE = 80
+    STATIC_BLOCK_HEADERS = True
+    SESSIONCLS = ElectrumX
+    DESERIALIZER = lib_tx.Deserializer
+    DAEMON = daemon.Daemon
+    BLOCK_PROCESSOR = block_proc.BlockProcessor
+    SESSION_MANAGER = SessionManager
+    DB = DB
+    HEADER_VALUES = [
+        'version', 'prev_block_hash', 'merkle_root', 'timestamp', 'bits', 'nonce'
+    ]
+    HEADER_UNPACK = struct.Struct('< I 32s 32s I I I').unpack_from
+    MEMPOOL_HISTOGRAM_REFRESH_SECS = 500
+    XPUB_VERBYTES = bytes('????', 'utf-8')
+    XPRV_VERBYTES = bytes('????', 'utf-8')
+    ENCODE_CHECK = Base58.encode_check
+    DECODE_CHECK = Base58.decode_check
+    # Peer discovery
+    PEER_DEFAULT_PORTS = {'t': '50001', 's': '50002'}
+    PEERS: List[str] = []
+
+    @classmethod
+    def lookup_coin_class(cls, name, net):
+        """Return a coin class given name and network.
+
+        Raise an exception if unrecognised."""
+        req_attrs = ['TX_COUNT', 'TX_COUNT_HEIGHT', 'TX_PER_BLOCK']
+        for coin in util.subclasses(Coin):
+            if (coin.NAME.lower() == name.lower() and
+                    coin.NET.lower() == net.lower()):
+                coin_req_attrs = req_attrs.copy()
+                missing = [attr for attr in coin_req_attrs
+                           if not hasattr(coin, attr)]
+                if missing:
+                    raise CoinError(f'coin {name} missing {missing} attributes')
+                return coin
+        raise CoinError(f'unknown coin {name} and network {net} combination')
+
+    @classmethod
+    def sanitize_url(cls, url):
+        # Remove surrounding ws and trailing /s
+        url = url.strip().rstrip('/')
+        match = cls.RPC_URL_REGEX.match(url)
+        if not match:
+            raise CoinError(f'invalid daemon URL: "{url}"')
+        if match.groups()[1] is None:
+            url += f':{cls.RPC_PORT:d}'
+        if not url.startswith('http://') and not url.startswith('https://'):
+            url = 'http://' + url
+        return url + '/'
+
+    @classmethod
+    def genesis_block(cls, block):
+        """Check the Genesis block is the right one for this coin.
+
+        Return the block less its unspendable coinbase.
+        """
+        header = cls.block_header(block, 0)
+        header_hex_hash = hash_to_hex_str(cls.header_hash(header))
+        if header_hex_hash != cls.GENESIS_HASH:
+            raise CoinError(f'genesis block has hash {header_hex_hash} expected {cls.GENESIS_HASH}')
+
+        return header + bytes(1)
+
+    @classmethod
+    def hashX_from_script(cls, script):
+        """Returns a hashX from a script, or None if the script is provably
+        unspendable so the output can be dropped.
+        """
+        if script and script[0] == OP_RETURN:
+            return None
+        return sha256(script).digest()[:HASHX_LEN]
+
+    @staticmethod
+    def lookup_xverbytes(verbytes):
+        """Return a (is_xpub, coin_class) pair given xpub/xprv verbytes."""
+        # Order means BTC testnet will override NMC testnet
+        for coin in util.subclasses(Coin):
+            if verbytes == coin.XPUB_VERBYTES:
+                return True, coin
+            if verbytes == coin.XPRV_VERBYTES:
+                return False, coin
+        raise CoinError('version bytes unrecognised')
+
+    @classmethod
+    def address_to_hashX(cls, address):
+        """Return a hashX given a coin address."""
+        return cls.hashX_from_script(cls.pay_to_address_script(address))
+
+    @classmethod
+    def P2PKH_address_from_hash160(cls, hash160):
+        """Return a P2PKH address given a hash160."""
+        assert len(hash160) == 20
+        return cls.ENCODE_CHECK(cls.P2PKH_VERBYTE + hash160)
+
+    @classmethod
+    def P2PKH_address_from_pubkey(cls, pubkey):
+        """Return a coin address given a public key."""
+        return cls.P2PKH_address_from_hash160(hash160(pubkey))
+
+    @classmethod
+    def P2SH_address_from_hash160(cls, hash160):
+        """Return a coin address given a hash160."""
+        assert len(hash160) == 20
+        return cls.ENCODE_CHECK(cls.P2SH_VERBYTES[0] + hash160)
+
+    @classmethod
+    def hash160_to_P2PKH_script(cls, hash160):
+        return ScriptPubKey.P2PKH_script(hash160)
+
+    @classmethod
+    def hash160_to_P2PKH_hashX(cls, hash160):
+        return cls.hashX_from_script(cls.hash160_to_P2PKH_script(hash160))
+
+    @classmethod
+    def pay_to_address_script(cls, address):
+        """Return a pubkey script that pays to a pubkey hash.
+
+        Pass the address (either P2PKH or P2SH) in base58 form.
+        """
+        raw = cls.DECODE_CHECK(address)
+
+        # Require version byte(s) plus hash160.
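+        # verbyte stays -1 (matching no version) when the decoded
+        # payload is too short to hold a version byte plus hash160.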
+        verbyte = -1
+        verlen = len(raw) - 20
+        if verlen > 0:
+            verbyte, hash160 = raw[:verlen], raw[verlen:]
+
+        if verbyte == cls.P2PKH_VERBYTE:
+            return cls.hash160_to_P2PKH_script(hash160)
+        if verbyte in cls.P2SH_VERBYTES:
+            return ScriptPubKey.P2SH_script(hash160)
+
+        raise CoinError(f'invalid address: {address}')
+
+    @classmethod
+    def privkey_WIF(cls, privkey_bytes, compressed):
+        """Return the private key encoded in Wallet Import Format."""
+        payload = bytearray(cls.WIF_BYTE) + privkey_bytes
+        if compressed:
+            payload.append(0x01)
+        return cls.ENCODE_CHECK(payload)
+
+    @classmethod
+    def header_hash(cls, header):
+        """Given a header return hash"""
+        return double_sha256(header)
+
+    @classmethod
+    def header_prevhash(cls, header):
+        """Given a header return previous hash"""
+        return header[4:36]
+
+    @classmethod
+    def static_header_offset(cls, height):
+        """Given a header height return its offset in the headers file.
+
+        If header sizes change at some point, this is the only code
+        that needs updating."""
+        assert cls.STATIC_BLOCK_HEADERS
+        return height * cls.BASIC_HEADER_SIZE
+
+    @classmethod
+    def static_header_len(cls, height):
+        """Given a header height return its length."""
+        return (cls.static_header_offset(height + 1)
+                - cls.static_header_offset(height))
+
+    @classmethod
+    def block_header(cls, block, height):
+        """Returns the block header given a block and its height."""
+        return block[:cls.static_header_len(height)]
+
+    @classmethod
+    def block(cls, raw_block, height):
+        """Return a Block namedtuple given a raw block and its height."""
+        header = cls.block_header(raw_block, height)
+        txs = cls.DESERIALIZER(raw_block, start=len(header)).read_tx_block()
+        return Block(raw_block, header, txs)
+
+    @classmethod
+    def decimal_value(cls, value):
+        """Return the number of standard coin units as a Decimal given a
+        quantity of smallest units.
+
+        For example 1 BTC is returned for 100 million satoshis.
+        """
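+        # VALUE_PER_COIN (100,000,000 above) is the satoshi-per-coin
+        # ratio used by bitcoin-family chains.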
+ """ + return Decimal(value) / cls.VALUE_PER_COIN + + @classmethod + def electrum_header(cls, header, height): + h = dict(zip(cls.HEADER_VALUES, cls.HEADER_UNPACK(header))) + # Add the height that is not present in the header itself + h['block_height'] = height + # Convert bytes to str + h['prev_block_hash'] = hash_to_hex_str(h['prev_block_hash']) + h['merkle_root'] = hash_to_hex_str(h['merkle_root']) + return h class LBC(Coin): diff --git a/lbry/lbry/wallet/server/daemon.py b/lbry/lbry/wallet/server/daemon.py index dcc3bb940..d46cb6ec9 100644 --- a/lbry/lbry/wallet/server/daemon.py +++ b/lbry/lbry/wallet/server/daemon.py @@ -1,7 +1,464 @@ from functools import wraps from torba.rpc.jsonrpc import RPCError -from torba.server.daemon import Daemon, DaemonError + +import asyncio +import itertools +import json +import time +from calendar import timegm +from struct import pack +from time import strptime + +import aiohttp + +from torba.server.util import hex_to_bytes, class_logger, \ + unpack_le_uint16_from, pack_varint +from torba.server.hash import hex_str_to_hash, hash_to_hex_str +from torba.server.tx import DeserializerDecred +from torba.rpc import JSONRPC + + +class DaemonError(Exception): + """Raised when the daemon returns an error in its results.""" + + +class WarmingUpError(Exception): + """Internal - when the daemon is warming up.""" + + +class WorkQueueFullError(Exception): + """Internal - when the daemon's work queue is full.""" + + +class Daemon: + """Handles connections to a daemon at the given URL.""" + + WARMING_UP = -28 + id_counter = itertools.count() + + def __init__(self, coin, url, max_workqueue=10, init_retry=0.25, + max_retry=4.0): + self.coin = coin + self.logger = class_logger(__name__, self.__class__.__name__) + self.set_url(url) + # Limit concurrent RPC calls to this number. + # See DEFAULT_HTTP_WORKQUEUE in bitcoind, which is typically 16 + self.workqueue_semaphore = asyncio.Semaphore(value=max_workqueue) + self.init_retry = init_retry + self.max_retry = max_retry + self._height = None + self.available_rpcs = {} + self.connector = aiohttp.TCPConnector() + + async def close(self): + if self.connector: + await self.connector.close() + self.connector = None + + def set_url(self, url): + """Set the URLS to the given list, and switch to the first one.""" + urls = url.split(',') + urls = [self.coin.sanitize_url(url) for url in urls] + for n, url in enumerate(urls): + status = '' if n else ' (current)' + logged_url = self.logged_url(url) + self.logger.info(f'daemon #{n + 1} at {logged_url}{status}') + self.url_index = 0 + self.urls = urls + + def current_url(self): + """Returns the current daemon URL.""" + return self.urls[self.url_index] + + def logged_url(self, url=None): + """The host and port part, for logging.""" + url = url or self.current_url() + return url[url.rindex('@') + 1:] + + def failover(self): + """Call to fail-over to the next daemon URL. + + Returns False if there is only one, otherwise True. 
+ """ + if len(self.urls) > 1: + self.url_index = (self.url_index + 1) % len(self.urls) + self.logger.info(f'failing over to {self.logged_url()}') + return True + return False + + def client_session(self): + """An aiohttp client session.""" + return aiohttp.ClientSession(connector=self.connector, connector_owner=False) + + async def _send_data(self, data): + if not self.connector: + raise asyncio.CancelledError('Tried to send request during shutdown.') + async with self.workqueue_semaphore: + async with self.client_session() as session: + async with session.post(self.current_url(), data=data) as resp: + kind = resp.headers.get('Content-Type', None) + if kind == 'application/json': + return await resp.json() + # bitcoind's HTTP protocol "handling" is a bad joke + text = await resp.text() + if 'Work queue depth exceeded' in text: + raise WorkQueueFullError + text = text.strip() or resp.reason + self.logger.error(text) + raise DaemonError(text) + + async def _send(self, payload, processor): + """Send a payload to be converted to JSON. + + Handles temporary connection issues. Daemon response errors + are raise through DaemonError. + """ + def log_error(error): + nonlocal last_error_log, retry + now = time.time() + if now - last_error_log > 60: + last_error_log = now + self.logger.error(f'{error} Retrying occasionally...') + if retry == self.max_retry and self.failover(): + retry = 0 + + on_good_message = None + last_error_log = 0 + data = json.dumps(payload) + retry = self.init_retry + while True: + try: + result = await self._send_data(data) + result = processor(result) + if on_good_message: + self.logger.info(on_good_message) + return result + except asyncio.TimeoutError: + log_error('timeout error.') + except aiohttp.ServerDisconnectedError: + log_error('disconnected.') + on_good_message = 'connection restored' + except aiohttp.ClientConnectionError: + log_error('connection problem - is your daemon running?') + on_good_message = 'connection restored' + except aiohttp.ClientError as e: + log_error(f'daemon error: {e}') + on_good_message = 'running normally' + except WarmingUpError: + log_error('starting up checking blocks.') + on_good_message = 'running normally' + except WorkQueueFullError: + log_error('work queue full.') + on_good_message = 'running normally' + + await asyncio.sleep(retry) + retry = max(min(self.max_retry, retry * 2), self.init_retry) + + async def _send_single(self, method, params=None): + """Send a single request to the daemon.""" + def processor(result): + err = result['error'] + if not err: + return result['result'] + if err.get('code') == self.WARMING_UP: + raise WarmingUpError + raise DaemonError(err) + + payload = {'method': method, 'id': next(self.id_counter)} + if params: + payload['params'] = params + return await self._send(payload, processor) + + async def _send_vector(self, method, params_iterable, replace_errs=False): + """Send several requests of the same method. + + The result will be an array of the same length as params_iterable. 
+        If replace_errs is true, any item with an error is returned as None,
+        otherwise an exception is raised."""
+        def processor(result):
+            errs = [item['error'] for item in result if item['error']]
+            if any(err.get('code') == self.WARMING_UP for err in errs):
+                raise WarmingUpError
+            if not errs or replace_errs:
+                return [item['result'] for item in result]
+            raise DaemonError(errs)
+
+        payload = [{'method': method, 'params': p, 'id': next(self.id_counter)}
+                   for p in params_iterable]
+        if payload:
+            return await self._send(payload, processor)
+        return []
+
+    async def _is_rpc_available(self, method):
+        """Return whether given RPC method is available in the daemon.
+
+        Results are cached and the daemon will generally not be queried with
+        the same method more than once."""
+        available = self.available_rpcs.get(method)
+        if available is None:
+            available = True
+            try:
+                await self._send_single(method)
+            except DaemonError as e:
+                err = e.args[0]
+                error_code = err.get("code")
+                available = error_code != JSONRPC.METHOD_NOT_FOUND
+            self.available_rpcs[method] = available
+        return available
+
+    async def block_hex_hashes(self, first, count):
+        """Return the hex hashes of count blocks starting at height first."""
+        params_iterable = ((h, ) for h in range(first, first + count))
+        return await self._send_vector('getblockhash', params_iterable)
+
+    async def deserialised_block(self, hex_hash):
+        """Return the deserialised block with the given hex hash."""
+        return await self._send_single('getblock', (hex_hash, True))
+
+    async def raw_blocks(self, hex_hashes):
+        """Return the raw binary blocks with the given hex hashes."""
+        params_iterable = ((h, False) for h in hex_hashes)
+        blocks = await self._send_vector('getblock', params_iterable)
+        # Convert hex string to bytes
+        return [hex_to_bytes(block) for block in blocks]
+
+    async def mempool_hashes(self):
+        """Update our record of the daemon's mempool hashes."""
+        return await self._send_single('getrawmempool')
+
+    async def estimatefee(self, block_count):
+        """Return the fee estimate for the block count.  Units are whole
+        currency units per KB, e.g. 0.00000995, or -1 if no estimate
+        is available.
+        """
+        args = (block_count, )
+        if await self._is_rpc_available('estimatesmartfee'):
+            estimate = await self._send_single('estimatesmartfee', args)
+            return estimate.get('feerate', -1)
+        return await self._send_single('estimatefee', args)
+
+    async def getnetworkinfo(self):
+        """Return the result of the 'getnetworkinfo' RPC call."""
+        return await self._send_single('getnetworkinfo')
+
+    async def relayfee(self):
+        """The minimum fee a low-priority tx must pay in order to be accepted
+        to the daemon's memory pool."""
+        network_info = await self.getnetworkinfo()
+        return network_info['relayfee']
+
+    async def getrawtransaction(self, hex_hash, verbose=False):
+        """Return the serialized raw transaction with the given hash."""
+        # Cast to int because some coin daemons are old and require it
+        return await self._send_single('getrawtransaction',
+                                       (hex_hash, int(verbose)))
+
+    async def getrawtransactions(self, hex_hashes, replace_errs=True):
+        """Return the serialized raw transactions with the given hashes.
+
+        Replaces errors with None by default."""
+        params_iterable = ((hex_hash, 0) for hex_hash in hex_hashes)
+        txs = await self._send_vector('getrawtransaction', params_iterable,
+                                      replace_errs=replace_errs)
+        # Convert hex strings to bytes
+        return [hex_to_bytes(tx) if tx else None for tx in txs]
+
+    async def broadcast_transaction(self, raw_tx):
+        """Broadcast a transaction to the network."""
+        return await self._send_single('sendrawtransaction', (raw_tx, ))
+
+    async def height(self):
+        """Query the daemon for its current height."""
+        self._height = await self._send_single('getblockcount')
+        return self._height
+
+    def cached_height(self):
+        """Return the cached daemon height.
+
+        If the daemon has not been queried yet this returns None."""
+        return self._height
+
+
+class DashDaemon(Daemon):
+
+    async def masternode_broadcast(self, params):
+        """Broadcast a masternode announcement to the network."""
+        return await self._send_single('masternodebroadcast', params)
+
+    async def masternode_list(self, params):
+        """Return the masternode status."""
+        return await self._send_single('masternodelist', params)
+
+
+class FakeEstimateFeeDaemon(Daemon):
+    """Daemon that simulates estimatefee and relayfee RPC calls.  Coin that
+    wants to use this daemon must define ESTIMATE_FEE & RELAY_FEE"""
+
+    async def estimatefee(self, block_count):
+        """Return the fee estimate for the given parameters."""
+        return self.coin.ESTIMATE_FEE
+
+    async def relayfee(self):
+        """The minimum fee a low-priority tx must pay in order to be accepted
+        to the daemon's memory pool."""
+        return self.coin.RELAY_FEE
+
+
+class LegacyRPCDaemon(Daemon):
+    """Handles connections to a daemon at the given URL.
+
+    This class is useful for daemons that don't have the new 'getblock'
+    RPC call that returns the block in hex; the workaround is to manually
+    recreate the block bytes.
+    The recreated block bytes may not be exactly as in the underlying
+    blockchain but it is good enough for our indexing purposes."""
+
+    async def raw_blocks(self, hex_hashes):
+        """Return the raw binary blocks with the given hex hashes."""
+        params_iterable = ((h, ) for h in hex_hashes)
+        block_info = await self._send_vector('getblock', params_iterable)
+
+        blocks = []
+        for i in block_info:
+            raw_block = await self.make_raw_block(i)
+            blocks.append(raw_block)
+
+        # Convert hex string to bytes
+        return blocks
+
+    async def make_raw_header(self, b):
+        pbh = b.get('previousblockhash')
+        if pbh is None:
+            pbh = '0' * 64
+        return b''.join([
+            pack('<L', b.get('version')),
+            hex_str_to_hash(pbh),
+            hex_str_to_hash(b.get('merkleroot')),
+            pack('<L', self.timestamp_safe(b.get('time'))),
+            pack('<L', int(b.get('bits'), 16)),
+            pack('<L', b.get('nonce'))
+        ])
+
+    async def make_raw_block(self, b):
+        """Construct a raw block"""
+
+        header = await self.make_raw_header(b)
+
+        transactions = []
+        if b.get('height') > 0:
+            transactions = await self.getrawtransactions(b.get('tx'), False)
+
+        raw_block = header
+        num_txs = len(transactions)
+        if num_txs > 0:
+            raw_block += pack_varint(num_txs)
+            raw_block += b''.join(transactions)
+        else:
+            raw_block += b'\x00'
+
+        return raw_block
+
+    def timestamp_safe(self, t):
+        if isinstance(t, int):
+            return t
+        return timegm(strptime(t, "%Y-%m-%d %H:%M:%S %Z"))
+
+
+class DecredDaemon(Daemon):
+    async def raw_blocks(self, hex_hashes):
+        """Return the raw binary blocks with the given hex hashes."""
+
+        params_iterable = ((h, False) for h in hex_hashes)
+        blocks = await self._send_vector('getblock', params_iterable)
+
+        raw_blocks = []
+        valid_tx_tree = {}
+        for block in blocks:
+            # Convert to bytes from hex
+            raw_block = hex_to_bytes(block)
+            raw_blocks.append(raw_block)
+            # Check if previous block is valid
+            prev = self.prev_hex_hash(raw_block)
+            votebits = unpack_le_uint16_from(raw_block[100:102])[0]
+            valid_tx_tree[prev] = self.is_valid_tx_tree(votebits)
+
+        processed_raw_blocks = []
+        for hash, raw_block in zip(hex_hashes, raw_blocks):
+            if hash in valid_tx_tree:
+                is_valid = valid_tx_tree[hash]
+            else:
+                # Do something complicated to figure out if this block is valid
+                header = await self._send_single('getblockheader', (hash, ))
+                if 'nextblockhash' not in header:
+                    raise DaemonError(f'Could not find next block for {hash}')
+                next_hash = header['nextblockhash']
+                next_header = await self._send_single('getblockheader',
+                                                      (next_hash, ))
+                is_valid = self.is_valid_tx_tree(next_header['votebits'])
+
+            if is_valid:
+                processed_raw_blocks.append(raw_block)
+            else:
+                # If this block is invalid remove the normal transactions
+                self.logger.info(f'block {hash} is invalidated')
+                processed_raw_blocks.append(self.strip_tx_tree(raw_block))
+
+        return processed_raw_blocks
+
+    @staticmethod
+    def prev_hex_hash(raw_block):
+        return hash_to_hex_str(raw_block[4:36])
+
+    @staticmethod
+    def is_valid_tx_tree(votebits):
+        # Check if previous block was invalidated.
+        return bool(votebits & (1 << 0) != 0)
+
+    def strip_tx_tree(self, raw_block):
+        c = self.coin
+        assert issubclass(c.DESERIALIZER, DeserializerDecred)
+        d = c.DESERIALIZER(raw_block, start=c.BASIC_HEADER_SIZE)
+        d.read_tx_tree()  # Skip normal transactions
+        # Create a fake block without any normal transactions
+        return raw_block[:c.BASIC_HEADER_SIZE] + b'\x00' + raw_block[d.cursor:]
+
+    async def height(self):
+        height = await super().height()
+        if height > 0:
+            # Lie about the daemon height as the current tip can be invalidated
+            height -= 1
+            self._height = height
+        return height
+
+    async def mempool_hashes(self):
+        mempool = await super().mempool_hashes()
+        # Add current tip transactions to the 'fake' mempool.
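+        # super().height() deliberately under-reports by one (see
+        # height() above), so ask the daemon for the real tip height.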
+        real_height = await self._send_single('getblockcount')
+        tip_hash = await self._send_single('getblockhash', (real_height,))
+        tip = await self.deserialised_block(tip_hash)
+        # Add normal transactions except coinbase
+        mempool += tip['tx'][1:]
+        # Add stake transactions if applicable
+        mempool += tip.get('stx', [])
+        return mempool
+
+    def client_session(self):
+        # FIXME allow self signed certificates
+        connector = aiohttp.TCPConnector(verify_ssl=False)
+        return aiohttp.ClientSession(connector=connector)
+
+
+class PreLegacyRPCDaemon(LegacyRPCDaemon):
+    """Handles connections to a daemon at the given URL.
+
+    This class is useful for daemons that don't have the new 'getblock'
+    RPC call that returns the block in hex, and need the False parameter
+    for getblock."""
+
+    async def deserialised_block(self, hex_hash):
+        """Return the deserialised block with the given hex hash."""
+        return await self._send_single('getblock', (hex_hash, False))
 
 
 def handles_errors(decorated_function):
diff --git a/torba/torba/server/env.py b/lbry/lbry/wallet/server/env.py
similarity index 100%
rename from torba/torba/server/env.py
rename to lbry/lbry/wallet/server/env.py
diff --git a/torba/torba/server/hash.py b/lbry/lbry/wallet/server/hash.py
similarity index 100%
rename from torba/torba/server/hash.py
rename to lbry/lbry/wallet/server/hash.py
diff --git a/torba/torba/server/history.py b/lbry/lbry/wallet/server/history.py
similarity index 100%
rename from torba/torba/server/history.py
rename to lbry/lbry/wallet/server/history.py
diff --git a/torba/torba/server/db.py b/lbry/lbry/wallet/server/leveldb.py
similarity index 100%
rename from torba/torba/server/db.py
rename to lbry/lbry/wallet/server/leveldb.py
diff --git a/torba/torba/server/mempool.py b/lbry/lbry/wallet/server/mempool.py
similarity index 100%
rename from torba/torba/server/mempool.py
rename to lbry/lbry/wallet/server/mempool.py
diff --git a/torba/torba/server/merkle.py b/lbry/lbry/wallet/server/merkle.py
similarity index 100%
rename from torba/torba/server/merkle.py
rename to lbry/lbry/wallet/server/merkle.py
diff --git a/torba/torba/server/peer.py b/lbry/lbry/wallet/server/peer.py
similarity index 100%
rename from torba/torba/server/peer.py
rename to lbry/lbry/wallet/server/peer.py
diff --git a/torba/torba/server/peers.py b/lbry/lbry/wallet/server/peers.py
similarity index 100%
rename from torba/torba/server/peers.py
rename to lbry/lbry/wallet/server/peers.py
diff --git a/torba/torba/server/script.py b/lbry/lbry/wallet/server/script.py
similarity index 100%
rename from torba/torba/server/script.py
rename to lbry/lbry/wallet/server/script.py
diff --git a/torba/torba/server/server.py b/lbry/lbry/wallet/server/server.py
similarity index 100%
rename from torba/torba/server/server.py
rename to lbry/lbry/wallet/server/server.py
diff --git a/lbry/lbry/wallet/server/session.py b/lbry/lbry/wallet/server/session.py
index a349764d1..611f6658d 100644
--- a/lbry/lbry/wallet/server/session.py
+++ b/lbry/lbry/wallet/server/session.py
@@ -19,8 +19,1467 @@ from lbry.wallet.server.websocket import AdminWebSocket
 from lbry.wallet.server.metrics import ServerLoadData, APICallMetrics
 from lbry import __version__ as sdk_version
+import base64
+import collections
+import asyncio
+import codecs
+import datetime
+import itertools
+import json
+import os
+import zlib
+
+import pylru
+import ssl
+import time
+import typing
+from asyncio import Event, sleep
+from collections import defaultdict
+from functools import partial
+
+import torba
+from torba.rpc import (
+    RPCSession, JSONRPCAutoDetect, JSONRPCConnection,
+    handler_invocation, RPCError, Request
+)
+from torba.server import text
+from torba.server import util
+from torba.server.hash import (sha256, hash_to_hex_str, hex_str_to_hash,
+                               HASHX_LEN, Base58Error)
+from torba.server.daemon import DaemonError
+from torba.server.peers import PeerManager
+if typing.TYPE_CHECKING:
+    from torba.server.env import Env
+    from torba.server.db import DB
+    from torba.server.block_processor import BlockProcessor
+    from torba.server.mempool import MemPool
+    from torba.server.daemon import Daemon
+
+BAD_REQUEST = 1
+DAEMON_ERROR = 2
+
 log = logging.getLogger(__name__)
 
 
+def scripthash_to_hashX(scripthash: str) -> bytes:
+    try:
+        bin_hash = hex_str_to_hash(scripthash)
+        if len(bin_hash) == 32:
+            return bin_hash[:HASHX_LEN]
+    except Exception:
+        pass
+    raise RPCError(BAD_REQUEST, f'{scripthash} is not a valid script hash')
+
+
+def non_negative_integer(value) -> int:
+    """Return param value if it is or can be converted to a non-negative
+    integer, otherwise raise an RPCError."""
+    try:
+        value = int(value)
+        if value >= 0:
+            return value
+    except ValueError:
+        pass
+    raise RPCError(BAD_REQUEST,
+                   f'{value} should be a non-negative integer')
+
+
+def assert_boolean(value) -> bool:
+    """Return param value if it is boolean, otherwise raise an RPCError."""
+    if value in (False, True):
+        return value
+    raise RPCError(BAD_REQUEST, f'{value} should be a boolean value')
+
+
+def assert_tx_hash(value: str) -> None:
+    """Raise an RPCError if the value is not a valid transaction
+    hash."""
+    try:
+        if len(util.hex_to_bytes(value)) == 32:
+            return
+    except Exception:
+        pass
+    raise RPCError(BAD_REQUEST, f'{value} should be a transaction hash')
+
+
+class Semaphores:
+    """For aiorpcX's semaphore handling."""
+
+    def __init__(self, semaphores):
+        self.semaphores = semaphores
+        self.acquired = []
+
+    async def __aenter__(self):
+        for semaphore in self.semaphores:
+            await semaphore.acquire()
+            self.acquired.append(semaphore)
+
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        for semaphore in self.acquired:
+            semaphore.release()
+
+
+class SessionGroup:
+
+    def __init__(self, gid: int):
+        self.gid = gid
+        # Concurrency per group
+        self.semaphore = asyncio.Semaphore(20)
+
+
+class SessionManager:
+    """Holds global state about all sessions."""
+
+    def __init__(self, env: 'Env', db: 'DB', bp: 'BlockProcessor', daemon: 'Daemon', mempool: 'MemPool',
+                 shutdown_event: asyncio.Event):
+        env.max_send = max(350000, env.max_send)
+        self.env = env
+        self.db = db
+        self.bp = bp
+        self.daemon = daemon
+        self.mempool = mempool
+        self.peer_mgr = PeerManager(env, db)
+        self.shutdown_event = shutdown_event
+        self.logger = util.class_logger(__name__, self.__class__.__name__)
+        self.servers: typing.Dict[str, asyncio.AbstractServer] = {}
+        self.sessions: typing.Set['SessionBase'] = set()
+        self.cur_group = SessionGroup(0)
+        self.txs_sent = 0
+        self.start_time = time.time()
+        self.history_cache = pylru.lrucache(256)
+        self.notified_height: typing.Optional[int] = None
+        # Cache some idea of room to avoid recounting on each subscription
+        self.subs_room = 0
+        # Masternode stuff only for such coins
+        if issubclass(env.coin.SESSIONCLS, DashElectrumX):
+            self.mn_cache_height = 0
+            self.mn_cache = []  # type: ignore
+
+        self.session_event = Event()
+
+        # Set up the RPC request handlers
+        cmds = ('add_peer daemon_url disconnect getinfo groups log peers '
+                'query reorg sessions stop'.split())
+        LocalRPC.request_handlers.update(
+            {cmd: getattr(self, 'rpc_' + cmd) for cmd in cmds}
+        )
+
+    async def _start_server(self, kind, *args, **kw_args):
+        loop = asyncio.get_event_loop()
+        if kind == 'RPC':
+            protocol_class = LocalRPC
+        else:
+            protocol_class = self.env.coin.SESSIONCLS
+        protocol_factory = partial(protocol_class, self, self.db,
+                                   self.mempool, self.peer_mgr, kind)
+
+        host, port = args[:2]
+        try:
+            self.servers[kind] = await loop.create_server(protocol_factory, *args, **kw_args)
+        except OSError as e:  # don't suppress CancelledError
+            self.logger.error(f'{kind} server failed to listen on {host}:'
+                              f'{port:d} :{e!r}')
+        else:
+            self.logger.info(f'{kind} server listening on {host}:{port:d}')
+
+    async def _start_external_servers(self):
+        """Start listening on TCP and SSL ports, but only if the respective
+        port was given in the environment.
+        """
+        env = self.env
+        host = env.cs_host(for_rpc=False)
+        if env.tcp_port is not None:
+            await self._start_server('TCP', host, env.tcp_port)
+        if env.ssl_port is not None:
+            sslc = ssl.SSLContext(ssl.PROTOCOL_TLS)
+            sslc.load_cert_chain(env.ssl_certfile, keyfile=env.ssl_keyfile)
+            await self._start_server('SSL', host, env.ssl_port, ssl=sslc)
+
+    async def _close_servers(self, kinds):
+        """Close the servers of the given kinds (TCP etc.)."""
+        if kinds:
+            self.logger.info('closing down {} listening servers'
+                             .format(', '.join(kinds)))
+        for kind in kinds:
+            server = self.servers.pop(kind, None)
+            if server:
+                server.close()
+                await server.wait_closed()
+
+    async def _manage_servers(self):
+        paused = False
+        max_sessions = self.env.max_sessions
+        low_watermark = max_sessions * 19 // 20
+        while True:
+            await self.session_event.wait()
+            self.session_event.clear()
+            if not paused and len(self.sessions) >= max_sessions:
+                self.logger.info(f'maximum sessions {max_sessions:,d} '
+                                 f'reached, stopping new connections until '
+                                 f'count drops to {low_watermark:,d}')
+                await self._close_servers(['TCP', 'SSL'])
+                paused = True
+            # Start listening for incoming connections if paused and
+            # session count has fallen
+            if paused and len(self.sessions) <= low_watermark:
+                self.logger.info('resuming listening for incoming connections')
+                await self._start_external_servers()
+                paused = False
+
+    async def _log_sessions(self):
+        """Periodically log sessions."""
+        log_interval = self.env.log_sessions
+        if log_interval:
+            while True:
+                await sleep(log_interval)
+                data = self._session_data(for_log=True)
+                for line in text.sessions_lines(data):
+                    self.logger.info(line)
+                self.logger.info(json.dumps(self._get_info()))
+
+    def _group_map(self):
+        group_map = defaultdict(list)
+        for session in self.sessions:
+            group_map[session.group].append(session)
+        return group_map
+
+    def _sub_count(self) -> int:
+        return sum(s.sub_count() for s in self.sessions)
+
+    def _lookup_session(self, session_id):
+        try:
+            session_id = int(session_id)
+        except Exception:
+            pass
+        else:
+            for session in self.sessions:
+                if session.session_id == session_id:
+                    return session
+        return None
+
+    async def _for_each_session(self, session_ids, operation):
+        if not isinstance(session_ids, list):
+            raise RPCError(BAD_REQUEST, 'expected a list of session IDs')
+
+        result = []
+        for session_id in session_ids:
+            session = self._lookup_session(session_id)
+            if session:
+                result.append(await operation(session))
+            else:
+                result.append(f'unknown session: {session_id}')
+        return result
+
+    async def _clear_stale_sessions(self):
+        """Cut off sessions that haven't done anything for 10 minutes."""
+        session_timeout = self.env.session_timeout
+        while True:
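+            # Check several times per timeout period so stale sessions
+            # are closed reasonably promptly after they go quiet.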
await sleep(session_timeout // 10) + stale_cutoff = time.perf_counter() - session_timeout + stale_sessions = [session for session in self.sessions + if session.last_recv < stale_cutoff] + if stale_sessions: + text = ', '.join(str(session.session_id) + for session in stale_sessions) + self.logger.info(f'closing stale connections {text}') + # Give the sockets some time to close gracefully + if stale_sessions: + await asyncio.wait([ + session.close(force_after=session_timeout // 10) for session in stale_sessions + ]) + + # Consolidate small groups + group_map = self._group_map() + groups = [group for group, sessions in group_map.items() + if len(sessions) <= 5] # fixme: apply session cost here + if len(groups) > 1: + new_group = groups[-1] + for group in groups: + for session in group_map[group]: + session.group = new_group + + def _get_info(self): + """A summary of server state.""" + group_map = self._group_map() + method_counts = collections.defaultdict(int) + error_count = 0 + logged = 0 + paused = 0 + pending_requests = 0 + closing = 0 + + for s in self.sessions: + error_count += s.errors + if s.log_me: + logged += 1 + if not s._can_send.is_set(): + paused += 1 + pending_requests += s.count_pending_items() + if s.is_closing(): + closing += 1 + for request, _ in s.connection._requests.values(): + method_counts[request.method] += 1 + return { + 'closing': closing, + 'daemon': self.daemon.logged_url(), + 'daemon_height': self.daemon.cached_height(), + 'db_height': self.db.db_height, + 'errors': error_count, + 'groups': len(group_map), + 'logged': logged, + 'paused': paused, + 'pid': os.getpid(), + 'peers': self.peer_mgr.info(), + 'requests': pending_requests, + 'method_counts': method_counts, + 'sessions': self.session_count(), + 'subs': self._sub_count(), + 'txs_sent': self.txs_sent, + 'uptime': util.formatted_time(time.time() - self.start_time), + 'version': torba.__version__, + } + + def _session_data(self, for_log): + """Returned to the RPC 'sessions' call.""" + now = time.time() + sessions = sorted(self.sessions, key=lambda s: s.start_time) + return [(session.session_id, + session.flags(), + session.peer_address_str(for_log=for_log), + session.client, + session.protocol_version_string(), + session.count_pending_items(), + session.txs_sent, + session.sub_count(), + session.recv_count, session.recv_size, + session.send_count, session.send_size, + now - session.start_time) + for session in sessions] + + def _group_data(self): + """Returned to the RPC 'groups' call.""" + result = [] + group_map = self._group_map() + for group, sessions in group_map.items(): + result.append([group.gid, + len(sessions), + sum(s.bw_charge for s in sessions), + sum(s.count_pending_items() for s in sessions), + sum(s.txs_sent for s in sessions), + sum(s.sub_count() for s in sessions), + sum(s.recv_count for s in sessions), + sum(s.recv_size for s in sessions), + sum(s.send_count for s in sessions), + sum(s.send_size for s in sessions), + ]) + return result + + async def _electrum_and_raw_headers(self, height): + raw_header = await self.raw_header(height) + electrum_header = self.env.coin.electrum_header(raw_header, height) + return electrum_header, raw_header + + async def _refresh_hsub_results(self, height): + """Refresh the cached header subscription responses to be for height, + and record that as notified_height. 
+ """ + # Paranoia: a reorg could race and leave db_height lower + height = min(height, self.db.db_height) + electrum, raw = await self._electrum_and_raw_headers(height) + self.hsub_results = (electrum, {'hex': raw.hex(), 'height': height}) + self.notified_height = height + + # --- LocalRPC command handlers + + async def rpc_add_peer(self, real_name): + """Add a peer. + + real_name: "bch.electrumx.cash t50001 s50002" for example + """ + await self.peer_mgr.add_localRPC_peer(real_name) + return f"peer '{real_name}' added" + + async def rpc_disconnect(self, session_ids): + """Disconnect sessions. + + session_ids: array of session IDs + """ + async def close(session): + """Close the session's transport.""" + await session.close(force_after=2) + return f'disconnected {session.session_id}' + + return await self._for_each_session(session_ids, close) + + async def rpc_log(self, session_ids): + """Toggle logging of sessions. + + session_ids: array of session IDs + """ + async def toggle_logging(session): + """Toggle logging of the session.""" + session.toggle_logging() + return f'log {session.session_id}: {session.log_me}' + + return await self._for_each_session(session_ids, toggle_logging) + + async def rpc_daemon_url(self, daemon_url): + """Replace the daemon URL.""" + daemon_url = daemon_url or self.env.daemon_url + try: + self.daemon.set_url(daemon_url) + except Exception as e: + raise RPCError(BAD_REQUEST, f'an error occurred: {e!r}') + return f'now using daemon at {self.daemon.logged_url()}' + + async def rpc_stop(self): + """Shut down the server cleanly.""" + self.shutdown_event.set() + return 'stopping' + + async def rpc_getinfo(self): + """Return summary information about the server process.""" + return self._get_info() + + async def rpc_groups(self): + """Return statistics about the session groups.""" + return self._group_data() + + async def rpc_peers(self): + """Return a list of data about server peers.""" + return self.peer_mgr.rpc_data() + + async def rpc_query(self, items, limit): + """Return a list of data about server peers.""" + coin = self.env.coin + db = self.db + lines = [] + + def arg_to_hashX(arg): + try: + script = bytes.fromhex(arg) + lines.append(f'Script: {arg}') + return coin.hashX_from_script(script) + except ValueError: + pass + + try: + hashX = coin.address_to_hashX(arg) + except Base58Error as e: + lines.append(e.args[0]) + return None + lines.append(f'Address: {arg}') + return hashX + + for arg in items: + hashX = arg_to_hashX(arg) + if not hashX: + continue + n = None + history = await db.limited_history(hashX, limit=limit) + for n, (tx_hash, height) in enumerate(history): + lines.append(f'History #{n:,d}: height {height:,d} ' + f'tx_hash {hash_to_hex_str(tx_hash)}') + if n is None: + lines.append('No history found') + n = None + utxos = await db.all_utxos(hashX) + for n, utxo in enumerate(utxos, start=1): + lines.append(f'UTXO #{n:,d}: tx_hash ' + f'{hash_to_hex_str(utxo.tx_hash)} ' + f'tx_pos {utxo.tx_pos:,d} height ' + f'{utxo.height:,d} value {utxo.value:,d}') + if n == limit: + break + if n is None: + lines.append('No UTXOs found') + + balance = sum(utxo.value for utxo in utxos) + lines.append(f'Balance: {coin.decimal_value(balance):,f} ' + f'{coin.SHORTNAME}') + + return lines + + async def rpc_sessions(self): + """Return statistics about connected sessions.""" + return self._session_data(for_log=False) + + async def rpc_reorg(self, count): + """Force a reorg of the given number of blocks. 
+ + count: number of blocks to reorg + """ + count = non_negative_integer(count) + if not self.bp.force_chain_reorg(count): + raise RPCError(BAD_REQUEST, 'still catching up with daemon') + return f'scheduled a reorg of {count:,d} blocks' + + # --- External Interface + + async def serve(self, notifications, server_listening_event): + """Start the RPC server if enabled. When the event is triggered, + start TCP and SSL servers.""" + try: + if self.env.rpc_port is not None: + await self._start_server('RPC', self.env.cs_host(for_rpc=True), + self.env.rpc_port) + self.logger.info(f'max session count: {self.env.max_sessions:,d}') + self.logger.info(f'session timeout: ' + f'{self.env.session_timeout:,d} seconds') + self.logger.info(f'max response size {self.env.max_send:,d} bytes') + if self.env.drop_client is not None: + self.logger.info(f'drop clients matching: {self.env.drop_client.pattern}') + # Start notifications; initialize hsub_results + await notifications.start(self.db.db_height, self._notify_sessions) + await self.start_other() + await self._start_external_servers() + server_listening_event.set() + # Peer discovery should start after the external servers + # because we connect to ourself + await asyncio.wait([ + self.peer_mgr.discover_peers(), + self._clear_stale_sessions(), + self._log_sessions(), + self._manage_servers() + ]) + finally: + await self._close_servers(list(self.servers.keys())) + if self.sessions: + await asyncio.wait([ + session.close(force_after=1) for session in self.sessions + ]) + await self.stop_other() + + async def start_other(self): + pass + + async def stop_other(self): + pass + + def session_count(self) -> int: + """The number of connections that we've sent something to.""" + return len(self.sessions) + + async def daemon_request(self, method, *args): + """Catch a DaemonError and convert it to an RPCError.""" + try: + return await getattr(self.daemon, method)(*args) + except DaemonError as e: + raise RPCError(DAEMON_ERROR, f'daemon error: {e!r}') from None + + async def raw_header(self, height): + """Return the binary header at the given height.""" + try: + return await self.db.raw_header(height) + except IndexError: + raise RPCError(BAD_REQUEST, f'height {height:,d} ' + 'out of range') from None + + async def electrum_header(self, height): + """Return the deserialized header at the given height.""" + electrum_header, _ = await self._electrum_and_raw_headers(height) + return electrum_header + + async def broadcast_transaction(self, raw_tx): + hex_hash = await self.daemon.broadcast_transaction(raw_tx) + self.mempool.wakeup.set() + self.txs_sent += 1 + return hex_hash + + async def limited_history(self, hashX): + """A caching layer.""" + hc = self.history_cache + if hashX not in hc: + # History DoS limit. Each element of history is about 99 + # bytes when encoded as JSON. This limits resource usage + # on bloated history requests, and uses a smaller divisor + # so large requests are logged before refusing them. 
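+ # For example: with the 350,000 byte max_send floor applied in + # SessionManager.__init__, limit = 350000 // 97 = 3608 history items.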
+ limit = self.env.max_send // 97 + hc[hashX] = await self.db.limited_history(hashX, limit=limit) + return hc[hashX] + + async def _notify_sessions(self, height, touched): + """Notify sessions about height changes and touched addresses.""" + height_changed = height != self.notified_height + if height_changed: + await self._refresh_hsub_results(height) + # Invalidate our history cache for touched hashXs + hc = self.history_cache + for hashX in set(hc).intersection(touched): + del hc[hashX] + + if self.sessions: + await asyncio.wait([ + session.notify(touched, height_changed) for session in self.sessions + ]) + + def add_session(self, session): + self.sessions.add(session) + self.session_event.set() + gid = int(session.start_time - self.start_time) // 900 + if self.cur_group.gid != gid: + self.cur_group = SessionGroup(gid) + return self.cur_group + + def remove_session(self, session): + """Remove a session from our sessions list if there.""" + self.sessions.remove(session) + self.session_event.set() + + +class SessionBase(RPCSession): + """Base class of ElectrumX JSON sessions. + + Each session runs its tasks in asynchronous parallelism with other + sessions. + """ + + MAX_CHUNK_SIZE = 40960 + session_counter = itertools.count() + request_handlers: typing.Dict[str, typing.Callable] = {} + version = '0.5.7' + + def __init__(self, session_mgr, db, mempool, peer_mgr, kind): + connection = JSONRPCConnection(JSONRPCAutoDetect) + super().__init__(connection=connection) + self.logger = util.class_logger(__name__, self.__class__.__name__) + self.session_mgr = session_mgr + self.db = db + self.mempool = mempool + self.peer_mgr = peer_mgr + self.kind = kind # 'RPC', 'TCP' etc. + self.env = session_mgr.env + self.coin = self.env.coin + self.client = 'unknown' + self.anon_logs = self.env.anon_logs + self.txs_sent = 0 + self.log_me = False + self.daemon_request = self.session_mgr.daemon_request + # Hijack the connection so we can log messages + self._receive_message_orig = self.connection.receive_message + self.connection.receive_message = self.receive_message + + async def notify(self, touched, height_changed): + pass + + def peer_address_str(self, *, for_log=True): + """Returns the peer's IP address and port as a human-readable + string, respecting anon logs if the output is for a log.""" + if for_log and self.anon_logs: + return 'xx.xx.xx.xx:xx' + return super().peer_address_str() + + def receive_message(self, message): + if self.log_me: + self.logger.info(f'processing {message}') + return self._receive_message_orig(message) + + def toggle_logging(self): + self.log_me = not self.log_me + + def flags(self): + """Status flags.""" + status = self.kind[0] + if self.is_closing(): + status += 'C' + if self.log_me: + status += 'L' + status += str(self._concurrency.max_concurrent) + return status + + def connection_made(self, transport): + """Handle an incoming client connection.""" + super().connection_made(transport) + self.session_id = next(self.session_counter) + context = {'conn_id': f'{self.session_id}'} + self.logger = util.ConnectionLogger(self.logger, context) + self.group = self.session_mgr.add_session(self) + self.logger.info(f'{self.kind} {self.peer_address_str()}, ' + f'{self.session_mgr.session_count():,d} total') + + def connection_lost(self, exc): + """Handle client disconnection.""" + super().connection_lost(exc) + self.session_mgr.remove_session(self) + msg = '' + if not self._can_send.is_set(): + msg += ' whilst paused' + if self._concurrency.max_concurrent != self.max_concurrent: + msg += 
' whilst throttled' + if self.send_size >= 1024*1024: + msg += ('. Sent {:,d} bytes in {:,d} messages' + .format(self.send_size, self.send_count)) + if msg: + msg = 'disconnected' + msg + self.logger.info(msg) + + def count_pending_items(self): + return len(self.connection.pending_requests()) + + def semaphore(self): + return Semaphores([self._concurrency.semaphore, self.group.semaphore]) + + def sub_count(self): + return 0 + + async def handle_request(self, request): + """Handle an incoming request. ElectrumX doesn't receive + notifications from client sessions. + """ + if isinstance(request, Request): + handler = self.request_handlers.get(request.method) + else: + handler = None + coro = handler_invocation(handler, request)() + return await coro + + +class ElectrumX(SessionBase): + """A TCP server that handles incoming Electrum connections.""" + + PROTOCOL_MIN = (1, 1) + PROTOCOL_MAX = (1, 4) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.subscribe_headers = False + self.subscribe_headers_raw = False + self.connection.max_response_size = self.env.max_send + self.hashX_subs = {} + self.sv_seen = False + self.mempool_statuses = {} + self.set_request_handlers(self.PROTOCOL_MIN) + + @classmethod + def protocol_min_max_strings(cls): + return [util.version_string(ver) + for ver in (cls.PROTOCOL_MIN, cls.PROTOCOL_MAX)] + + @classmethod + def server_features(cls, env): + """Return the server features dictionary.""" + min_str, max_str = cls.protocol_min_max_strings() + return { + 'hosts': env.hosts_dict(), + 'pruning': None, + 'server_version': cls.version, + 'protocol_min': min_str, + 'protocol_max': max_str, + 'genesis_hash': env.coin.GENESIS_HASH, + 'description': env.description, + 'payment_address': env.donation_address, + 'daily_fee': env.daily_fee, + 'hash_function': 'sha256', + } + + async def server_features_async(self): + return self.server_features(self.env) + + @classmethod + def server_version_args(cls): + """The arguments to a server.version RPC call to a peer.""" + return [cls.version, cls.protocol_min_max_strings()] + + def protocol_version_string(self): + return util.version_string(self.protocol_tuple) + + def sub_count(self): + return len(self.hashX_subs) + + async def notify(self, touched, height_changed): + """Notify the client about changes to touched addresses (from mempool + updates or new blocks) and height. + """ + if height_changed and self.subscribe_headers: + args = (await self.subscribe_headers_result(), ) + await self.send_notification('blockchain.headers.subscribe', args) + + touched = touched.intersection(self.hashX_subs) + if touched or (height_changed and self.mempool_statuses): + changed = {} + + for hashX in touched: + alias = self.hashX_subs[hashX] + status = await self.address_status(hashX) + changed[alias] = status + + # Check mempool hashXs - the status is a function of the + # confirmed state of other transactions. Note: we cannot + # iterate over mempool_statuses as it changes size. 
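+ # Illustration: if mempool tx B spends an output of mempool tx A and + # A confirms in a new block, B's summary height flips from -1 + # (unconfirmed inputs) to 0, changing the status computed below.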
+ for hashX in tuple(self.mempool_statuses): + # Items can be evicted whilst await-ing status; False + # ensures such hashXs are notified + old_status = self.mempool_statuses.get(hashX, False) + status = await self.address_status(hashX) + if status != old_status: + alias = self.hashX_subs[hashX] + changed[alias] = status + + for alias, status in changed.items(): + if len(alias) == 64: + method = 'blockchain.scripthash.subscribe' + else: + method = 'blockchain.address.subscribe' + await self.send_notification(method, (alias, status)) + + if changed: + es = '' if len(changed) == 1 else 'es' + self.logger.info(f'notified of {len(changed):,d} address{es}') + + async def subscribe_headers_result(self): + """The result of a header subscription or notification.""" + return self.session_mgr.hsub_results[self.subscribe_headers_raw] + + async def _headers_subscribe(self, raw): + """Subscribe to get headers of new blocks.""" + self.subscribe_headers_raw = assert_boolean(raw) + self.subscribe_headers = True + return await self.subscribe_headers_result() + + async def headers_subscribe(self): + """Subscribe to get raw headers of new blocks.""" + return await self._headers_subscribe(True) + + async def headers_subscribe_True(self, raw=True): + """Subscribe to get headers of new blocks.""" + return await self._headers_subscribe(raw) + + async def headers_subscribe_False(self, raw=False): + """Subscribe to get headers of new blocks.""" + return await self._headers_subscribe(raw) + + async def add_peer(self, features): + """Add a peer (but only if the peer resolves to the source).""" + return await self.peer_mgr.on_add_peer(features, self.peer_address()) + + async def peers_subscribe(self): + """Return the server peers as a list of (ip, host, details) tuples.""" + return self.peer_mgr.on_peers_subscribe(self.is_tor()) + + async def address_status(self, hashX): + """Returns an address status. + + Status is a hex string, but must be None if there is no history. 
+ """ + # Note history is ordered and mempool unordered in electrum-server + # For mempool, height is -1 if it has unconfirmed inputs, otherwise 0 + db_history = await self.session_mgr.limited_history(hashX) + mempool = await self.mempool.transaction_summaries(hashX) + + status = ''.join(f'{hash_to_hex_str(tx_hash)}:' + f'{height:d}:' + for tx_hash, height in db_history) + status += ''.join(f'{hash_to_hex_str(tx.hash)}:' + f'{-tx.has_unconfirmed_inputs:d}:' + for tx in mempool) + if status: + status = sha256(status.encode()).hex() + else: + status = None + + if mempool: + self.mempool_statuses[hashX] = status + else: + self.mempool_statuses.pop(hashX, None) + + return status + + async def hashX_listunspent(self, hashX): + """Return the list of UTXOs of a script hash, including mempool + effects.""" + utxos = await self.db.all_utxos(hashX) + utxos = sorted(utxos) + utxos.extend(await self.mempool.unordered_UTXOs(hashX)) + spends = await self.mempool.potential_spends(hashX) + + return [{'tx_hash': hash_to_hex_str(utxo.tx_hash), + 'tx_pos': utxo.tx_pos, + 'height': utxo.height, 'value': utxo.value} + for utxo in utxos + if (utxo.tx_hash, utxo.tx_pos) not in spends] + + async def hashX_subscribe(self, hashX, alias): + self.hashX_subs[hashX] = alias + return await self.address_status(hashX) + + async def hashX_unsubscribe(self, hashX, alias): + del self.hashX_subs[hashX] + + def address_to_hashX(self, address): + try: + return self.coin.address_to_hashX(address) + except Exception: + pass + raise RPCError(BAD_REQUEST, f'{address} is not a valid address') + + async def address_get_balance(self, address): + """Return the confirmed and unconfirmed balance of an address.""" + hashX = self.address_to_hashX(address) + return await self.get_balance(hashX) + + async def address_get_history(self, address): + """Return the confirmed and unconfirmed history of an address.""" + hashX = self.address_to_hashX(address) + return await self.confirmed_and_unconfirmed_history(hashX) + + async def address_get_mempool(self, address): + """Return the mempool transactions touching an address.""" + hashX = self.address_to_hashX(address) + return await self.unconfirmed_history(hashX) + + async def address_listunspent(self, address): + """Return the list of UTXOs of an address.""" + hashX = self.address_to_hashX(address) + return await self.hashX_listunspent(hashX) + + async def address_subscribe(self, address): + """Subscribe to an address. + + address: the address to subscribe to""" + hashX = self.address_to_hashX(address) + return await self.hashX_subscribe(hashX, address) + + async def address_unsubscribe(self, address): + """Unsubscribe an address. 
+ + address: the address to unsubscribe""" + hashX = self.address_to_hashX(address) + return await self.hashX_unsubscribe(hashX, address) + + async def get_balance(self, hashX): + utxos = await self.db.all_utxos(hashX) + confirmed = sum(utxo.value for utxo in utxos) + unconfirmed = await self.mempool.balance_delta(hashX) + return {'confirmed': confirmed, 'unconfirmed': unconfirmed} + + async def scripthash_get_balance(self, scripthash): + """Return the confirmed and unconfirmed balance of a scripthash.""" + hashX = scripthash_to_hashX(scripthash) + return await self.get_balance(hashX) + + async def unconfirmed_history(self, hashX): + # Note unconfirmed history is unordered in electrum-server + # height is -1 if it has unconfirmed inputs, otherwise 0 + return [{'tx_hash': hash_to_hex_str(tx.hash), + 'height': -tx.has_unconfirmed_inputs, + 'fee': tx.fee} + for tx in await self.mempool.transaction_summaries(hashX)] + + async def confirmed_and_unconfirmed_history(self, hashX): + # Note history is ordered but unconfirmed is unordered in e-s + history = await self.session_mgr.limited_history(hashX) + conf = [{'tx_hash': hash_to_hex_str(tx_hash), 'height': height} + for tx_hash, height in history] + return conf + await self.unconfirmed_history(hashX) + + async def scripthash_get_history(self, scripthash): + """Return the confirmed and unconfirmed history of a scripthash.""" + hashX = scripthash_to_hashX(scripthash) + return await self.confirmed_and_unconfirmed_history(hashX) + + async def scripthash_get_mempool(self, scripthash): + """Return the mempool transactions touching a scripthash.""" + hashX = scripthash_to_hashX(scripthash) + return await self.unconfirmed_history(hashX) + + async def scripthash_listunspent(self, scripthash): + """Return the list of UTXOs of a scripthash.""" + hashX = scripthash_to_hashX(scripthash) + return await self.hashX_listunspent(hashX) + + async def scripthash_subscribe(self, scripthash): + """Subscribe to a script hash. + + scripthash: the SHA256 hash of the script to subscribe to""" + hashX = scripthash_to_hashX(scripthash) + return await self.hashX_subscribe(hashX, scripthash) + + async def _merkle_proof(self, cp_height, height): + max_height = self.db.db_height + if not height <= cp_height <= max_height: + raise RPCError(BAD_REQUEST, + f'require header height {height:,d} <= ' + f'cp_height {cp_height:,d} <= ' + f'chain height {max_height:,d}') + branch, root = await self.db.header_branch_and_root(cp_height + 1, + height) + return { + 'branch': [hash_to_hex_str(elt) for elt in branch], + 'root': hash_to_hex_str(root), + } + + async def block_header(self, height, cp_height=0): + """Return a raw block header as a hexadecimal string, or as a + dictionary with a merkle proof.""" + height = non_negative_integer(height) + cp_height = non_negative_integer(cp_height) + raw_header_hex = (await self.session_mgr.raw_header(height)).hex() + if cp_height == 0: + return raw_header_hex + result = {'header': raw_header_hex} + result.update(await self._merkle_proof(cp_height, height)) + return result + + async def block_header_13(self, height): + """Return a raw block header as a hexadecimal string. + + height: the header's height""" + return await self.block_header(height) + + async def block_headers(self, start_height, count, cp_height=0, b64=False): + """Return count concatenated block headers as hex for the main chain; + starting at start_height. + + start_height and count must be non-negative integers. At most + MAX_CHUNK_SIZE headers will be returned. 
+ """ + start_height = non_negative_integer(start_height) + count = non_negative_integer(count) + cp_height = non_negative_integer(cp_height) + + max_size = self.MAX_CHUNK_SIZE + count = min(count, max_size) + headers, count = await self.db.read_headers(start_height, count) + compressobj = zlib.compressobj(wbits=-15, level=1, memLevel=9) + headers = base64.b64encode(compressobj.compress(headers) + compressobj.flush()).decode() if b64 else headers.hex() + result = { + 'base64' if b64 else 'hex': headers, + 'count': count, + 'max': max_size + } + if count and cp_height: + last_height = start_height + count - 1 + result.update(await self._merkle_proof(cp_height, last_height)) + return result + + async def block_get_chunk(self, index): + """Return a chunk of block headers as a hexadecimal string. + + index: the chunk index""" + index = non_negative_integer(index) + size = self.coin.CHUNK_SIZE + start_height = index * size + headers, _ = await self.db.read_headers(start_height, size) + return headers.hex() + + async def block_get_header(self, height): + """The deserialized header at a given height. + + height: the header's height""" + height = non_negative_integer(height) + return await self.session_mgr.electrum_header(height) + + def is_tor(self): + """Try to detect if the connection is to a tor hidden service we are + running.""" + peername = self.peer_mgr.proxy_peername() + if not peername: + return False + peer_address = self.peer_address() + return peer_address and peer_address[0] == peername[0] + + async def replaced_banner(self, banner): + network_info = await self.daemon_request('getnetworkinfo') + ni_version = network_info['version'] + major, minor = divmod(ni_version, 1000000) + minor, revision = divmod(minor, 10000) + revision //= 100 + daemon_version = f'{major:d}.{minor:d}.{revision:d}' + for pair in [ + ('$SERVER_VERSION', self.version), + ('$DAEMON_VERSION', daemon_version), + ('$DAEMON_SUBVERSION', network_info['subversion']), + ('$DONATION_ADDRESS', self.env.donation_address), + ]: + banner = banner.replace(*pair) + return banner + + async def donation_address(self): + """Return the donation address as a string, empty if there is none.""" + return self.env.donation_address + + async def banner(self): + """Return the server banner text.""" + banner = f'You are connected to an {self.version} server.' + + if self.is_tor(): + banner_file = self.env.tor_banner_file + else: + banner_file = self.env.banner_file + if banner_file: + try: + with codecs.open(banner_file, 'r', 'utf-8') as f: + banner = f.read() + except Exception as e: + self.logger.error(f'reading banner file {banner_file}: {e!r}') + else: + banner = await self.replaced_banner(banner) + + return banner + + async def relayfee(self): + """The minimum fee a low-priority tx must pay in order to be accepted + to the daemon's memory pool.""" + return await self.daemon_request('relayfee') + + async def estimatefee(self, number): + """The estimated transaction fee per kilobyte to be paid for a + transaction to be included within a certain number of blocks. + + number: the number of blocks + """ + number = non_negative_integer(number) + return await self.daemon_request('estimatefee', number) + + async def ping(self): + """Serves as a connection keep-alive mechanism and for the client to + confirm the server is still responding. + """ + return None + + async def server_version(self, client_name='', protocol_version=None): + """Returns the server version as a string. 
+
+        client_name: a string identifying the client
+        protocol_version: the protocol version spoken by the client
+        """
+        if self.sv_seen and self.protocol_tuple >= (1, 4):
+            raise RPCError(BAD_REQUEST, 'server.version already sent')
+        self.sv_seen = True
+
+        if client_name:
+            client_name = str(client_name)
+            if self.env.drop_client is not None and \
+                    self.env.drop_client.match(client_name):
+                self.close_after_send = True
+                raise RPCError(BAD_REQUEST,
+                               f'unsupported client: {client_name}')
+            self.client = client_name[:17]
+
+        # Find the highest common protocol version.  Disconnect if
+        # that protocol version is unsupported.
+        ptuple, client_min = util.protocol_version(
+            protocol_version, self.PROTOCOL_MIN, self.PROTOCOL_MAX)
+        if ptuple is None:
+            if client_min > self.PROTOCOL_MIN:
+                self.logger.info(f'client requested future protocol version '
+                                 f'{util.version_string(client_min)} '
+                                 f'- is your software out of date?')
+            self.close_after_send = True
+            raise RPCError(BAD_REQUEST,
+                           f'unsupported protocol version: {protocol_version}')
+        self.set_request_handlers(ptuple)
+
+        return self.version, self.protocol_version_string()
+
+    async def transaction_broadcast(self, raw_tx):
+        """Broadcast a raw transaction to the network.
+
+        raw_tx: the raw transaction as a hexadecimal string"""
+        # This returns errors as JSON RPC errors, as is natural
+        try:
+            hex_hash = await self.session_mgr.broadcast_transaction(raw_tx)
+            self.txs_sent += 1
+            self.logger.info(f'sent tx: {hex_hash}')
+            return hex_hash
+        except DaemonError as e:
+            error, = e.args
+            message = error['message']
+            self.logger.info(f'error sending transaction: {message}')
+            raise RPCError(BAD_REQUEST, 'the transaction was rejected by '
+                           f'network rules.\n\n{message}\n[{raw_tx}]')
+
+    async def transaction_get(self, tx_hash, verbose=False):
+        """Return the serialized raw transaction given its hash.
+
+        tx_hash: the transaction hash as a hexadecimal string
+        verbose: passed on to the daemon
+        """
+        assert_tx_hash(tx_hash)
+        if verbose not in (True, False):
+            raise RPCError(BAD_REQUEST, '"verbose" must be a boolean')
+
+        return await self.daemon_request('getrawtransaction', tx_hash, verbose)
+
+    async def _block_hash_and_tx_hashes(self, height):
+        """Returns a pair (block_hash, tx_hashes) for the main chain block at
+        the given height.
+
+        block_hash is a hexadecimal string, and tx_hashes is an
+        ordered list of hexadecimal strings.
+        """
+        height = non_negative_integer(height)
+        hex_hashes = await self.daemon_request('block_hex_hashes', height, 1)
+        block_hash = hex_hashes[0]
+        block = await self.daemon_request('deserialised_block', block_hash)
+        return block_hash, block['tx']
+
+    def _get_merkle_branch(self, tx_hashes, tx_pos):
+        """Return a merkle branch to a transaction.
+
+        tx_hashes: ordered list of hex strings of tx hashes in a block
+        tx_pos: index of transaction in tx_hashes to create branch for
+        """
+        hashes = [hex_str_to_hash(hash) for hash in tx_hashes]
+        branch, root = self.db.merkle.branch_and_root(hashes, tx_pos)
+        branch = [hash_to_hex_str(hash) for hash in branch]
+        return branch
+
+    async def transaction_merkle(self, tx_hash, height):
+        """Return the merkle branch to a confirmed transaction given its hash
+        and height.
+ + tx_hash: the transaction hash as a hexadecimal string + height: the height of the block it is in + """ + assert_tx_hash(tx_hash) + block_hash, tx_hashes = await self._block_hash_and_tx_hashes(height) + try: + pos = tx_hashes.index(tx_hash) + except ValueError: + raise RPCError(BAD_REQUEST, f'tx hash {tx_hash} not in ' + f'block {block_hash} at height {height:,d}') + branch = self._get_merkle_branch(tx_hashes, pos) + return {"block_height": height, "merkle": branch, "pos": pos} + + async def transaction_id_from_pos(self, height, tx_pos, merkle=False): + """Return the txid and optionally a merkle proof, given + a block height and position in the block. + """ + tx_pos = non_negative_integer(tx_pos) + if merkle not in (True, False): + raise RPCError(BAD_REQUEST, f'"merkle" must be a boolean') + + block_hash, tx_hashes = await self._block_hash_and_tx_hashes(height) + try: + tx_hash = tx_hashes[tx_pos] + except IndexError: + raise RPCError(BAD_REQUEST, f'no tx at position {tx_pos:,d} in ' + f'block {block_hash} at height {height:,d}') + + if merkle: + branch = self._get_merkle_branch(tx_hashes, tx_pos) + return {"tx_hash": tx_hash, "merkle": branch} + else: + return tx_hash + + def set_request_handlers(self, ptuple): + self.protocol_tuple = ptuple + + handlers = { + 'blockchain.block.get_chunk': self.block_get_chunk, + 'blockchain.block.get_header': self.block_get_header, + 'blockchain.estimatefee': self.estimatefee, + 'blockchain.relayfee': self.relayfee, + 'blockchain.scripthash.get_balance': self.scripthash_get_balance, + 'blockchain.scripthash.get_history': self.scripthash_get_history, + 'blockchain.scripthash.get_mempool': self.scripthash_get_mempool, + 'blockchain.scripthash.listunspent': self.scripthash_listunspent, + 'blockchain.scripthash.subscribe': self.scripthash_subscribe, + 'blockchain.transaction.broadcast': self.transaction_broadcast, + 'blockchain.transaction.get': self.transaction_get, + 'blockchain.transaction.get_merkle': self.transaction_merkle, + 'server.add_peer': self.add_peer, + 'server.banner': self.banner, + 'server.donation_address': self.donation_address, + 'server.features': self.server_features_async, + 'server.peers.subscribe': self.peers_subscribe, + 'server.version': self.server_version, + } + + if ptuple >= (1, 2): + # New handler as of 1.2 + handlers.update({ + 'mempool.get_fee_histogram': + self.mempool.compact_fee_histogram, + 'blockchain.block.headers': self.block_headers, + 'server.ping': self.ping, + }) + + if ptuple >= (1, 4): + handlers.update({ + 'blockchain.block.header': self.block_header, + 'blockchain.block.headers': self.block_headers, + 'blockchain.headers.subscribe': self.headers_subscribe, + 'blockchain.transaction.id_from_pos': + self.transaction_id_from_pos, + }) + elif ptuple >= (1, 3): + handlers.update({ + 'blockchain.block.header': self.block_header_13, + 'blockchain.headers.subscribe': self.headers_subscribe_True, + }) + else: + handlers.update({ + 'blockchain.headers.subscribe': self.headers_subscribe_False, + 'blockchain.address.get_balance': self.address_get_balance, + 'blockchain.address.get_history': self.address_get_history, + 'blockchain.address.get_mempool': self.address_get_mempool, + 'blockchain.address.listunspent': self.address_listunspent, + 'blockchain.address.subscribe': self.address_subscribe, + 'blockchain.address.unsubscribe': self.address_unsubscribe, + }) + + self.request_handlers = handlers + + +class LocalRPC(SessionBase): + """A local TCP RPC server session.""" + + def __init__(self, *args, **kwargs): + 
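# Local RPC is a trusted connection, so the _max_response_size of 0 + # set below effectively disables aiorpcX's response size cap, letting + # results such as the full 'sessions' table through untruncated.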
+        super().__init__(*args, **kwargs)
+        self.client = 'RPC'
+        self.connection._max_response_size = 0
+
+    def protocol_version_string(self):
+        return 'RPC'
+
+
+class DashElectrumX(ElectrumX):
+    """A TCP server that handles incoming Electrum Dash connections."""
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.mns = set()
+
+    def set_request_handlers(self, ptuple):
+        super().set_request_handlers(ptuple)
+        self.request_handlers.update({
+            'masternode.announce.broadcast':
+                self.masternode_announce_broadcast,
+            'masternode.subscribe': self.masternode_subscribe,
+            'masternode.list': self.masternode_list
+        })
+
+    async def notify(self, touched, height_changed):
+        """Notify the client about changes in the masternode list."""
+        await super().notify(touched, height_changed)
+        for mn in self.mns:
+            status = await self.daemon_request('masternode_list',
+                                               ['status', mn])
+            await self.send_notification('masternode.subscribe',
+                                         [mn, status.get(mn)])
+
+    # Masternode command handlers
+    async def masternode_announce_broadcast(self, signmnb):
+        """Pass through the masternode announce message to be broadcast
+        by the daemon.
+
+        signmnb: signed masternode broadcast message."""
+        try:
+            return await self.daemon_request('masternode_broadcast',
+                                             ['relay', signmnb])
+        except DaemonError as e:
+            error, = e.args
+            message = error['message']
+            self.logger.info(f'masternode_broadcast: {message}')
+            raise RPCError(BAD_REQUEST, 'the masternode broadcast was '
+                           f'rejected.\n\n{message}\n[{signmnb}]')
+
+    async def masternode_subscribe(self, collateral):
+        """Returns the status of a masternode.
+
+        collateral: masternode collateral.
+        """
+        result = await self.daemon_request('masternode_list',
+                                           ['status', collateral])
+        if result is not None:
+            self.mns.add(collateral)
+            return result.get(collateral)
+        return None
+
+    async def masternode_list(self, payees):
+        """
+        Returns the list of masternodes.
+
+        payees: a list of masternode payee addresses.
+        """
+        if not isinstance(payees, list):
+            raise RPCError(BAD_REQUEST, 'expected a list of payees')
+
+        def get_masternode_payment_queue(mns):
+            """Returns the calculated position in the payment queue for all
+            the valid masternodes in the given mns list.
+
+            mns: a list of masternodes information.
+            """
+            now = int(datetime.datetime.utcnow().strftime("%s"))
+            mn_queue = []
+
+            # Only ENABLED masternodes are considered for the list.
+            for line in mns:
+                mnstat = mns[line].split()
+                if mnstat[0] == 'ENABLED':
+                    # if last paid time == 0
+                    if int(mnstat[5]) == 0:
+                        # use active seconds
+                        mnstat.append(int(mnstat[4]))
+                    else:
+                        # now minus last paid
+                        delta = now - int(mnstat[5])
+                        # if > active seconds, use active seconds
+                        if delta >= int(mnstat[4]):
+                            mnstat.append(int(mnstat[4]))
+                        # otherwise use the time since last payment
+                        else:
+                            mnstat.append(delta)
+                    mn_queue.append(mnstat)
+            mn_queue = sorted(mn_queue, key=lambda x: x[8], reverse=True)
+            return mn_queue
+
+        def get_payment_position(payment_queue, address):
+            """
+            Returns the position in the payment queue for the given address.
+
+            payment_queue: the masternode payment queue.
+            address: masternode payee address.
+            """
+            position = -1
+            for pos, mn in enumerate(payment_queue, start=1):
+                if mn[2] == address:
+                    position = pos
+                    break
+            return position
+
+        # In accordance with the masternode payment queue, a custom list
+        # with the masternode information, including the payment
+        # position, is returned.
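+        # Worked example with made-up numbers: with 100 ENABLED
+        # masternodes, the queue is sorted by the appended wait value
+        # (column 8) in descending order, so the longest-waiting node
+        # gets position 1 and 'inselection' below marks positions in the
+        # top tenth (position < 100 // 10 = 10) as next in line for payment.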
+ cache = self.session_mgr.mn_cache + if not cache or self.session_mgr.mn_cache_height != self.db.db_height: + full_mn_list = await self.daemon_request('masternode_list', + ['full']) + mn_payment_queue = get_masternode_payment_queue(full_mn_list) + mn_payment_count = len(mn_payment_queue) + mn_list = [] + for key, value in full_mn_list.items(): + mn_data = value.split() + mn_info = {} + mn_info['vin'] = key + mn_info['status'] = mn_data[0] + mn_info['protocol'] = mn_data[1] + mn_info['payee'] = mn_data[2] + mn_info['lastseen'] = mn_data[3] + mn_info['activeseconds'] = mn_data[4] + mn_info['lastpaidtime'] = mn_data[5] + mn_info['lastpaidblock'] = mn_data[6] + mn_info['ip'] = mn_data[7] + mn_info['paymentposition'] = get_payment_position( + mn_payment_queue, mn_info['payee']) + mn_info['inselection'] = ( + mn_info['paymentposition'] < mn_payment_count // 10) + balance = await self.address_get_balance(mn_info['payee']) + mn_info['balance'] = (sum(balance.values()) + / self.coin.VALUE_PER_COIN) + mn_list.append(mn_info) + cache.clear() + cache.extend(mn_list) + self.session_mgr.mn_cache_height = self.db.db_height + + # If payees is an empty list the whole masternode list is returned + if payees: + return [mn for mn in cache if mn['payee'] in payees] + else: + return cache + class ResultCacheItem: __slots__ = '_result', 'lock', 'has_result' diff --git a/torba/torba/server/storage.py b/lbry/lbry/wallet/server/storage.py similarity index 100% rename from torba/torba/server/storage.py rename to lbry/lbry/wallet/server/storage.py diff --git a/torba/torba/server/text.py b/lbry/lbry/wallet/server/text.py similarity index 100% rename from torba/torba/server/text.py rename to lbry/lbry/wallet/server/text.py diff --git a/torba/torba/server/tx.py b/lbry/lbry/wallet/server/tx.py similarity index 100% rename from torba/torba/server/tx.py rename to lbry/lbry/wallet/server/tx.py diff --git a/torba/torba/server/util.py b/lbry/lbry/wallet/server/util.py similarity index 100% rename from torba/torba/server/util.py rename to lbry/lbry/wallet/server/util.py diff --git a/torba/torba/stream.py b/lbry/lbry/wallet/stream.py similarity index 100% rename from torba/torba/stream.py rename to lbry/lbry/wallet/stream.py diff --git a/torba/torba/tasks.py b/lbry/lbry/wallet/tasks.py similarity index 100% rename from torba/torba/tasks.py rename to lbry/lbry/wallet/tasks.py diff --git a/torba/torba/testcase.py b/lbry/lbry/wallet/testcase.py similarity index 100% rename from torba/torba/testcase.py rename to lbry/lbry/wallet/testcase.py diff --git a/torba/setup.cfg b/lbry/setup.cfg similarity index 100% rename from torba/setup.cfg rename to lbry/setup.cfg diff --git a/lbry/setup.py b/lbry/setup.py index 3a23a0250..904f9b46c 100644 --- a/lbry/setup.py +++ b/lbry/setup.py @@ -1,9 +1,15 @@ import os +import sys from lbry import __name__, __version__ from setuptools import setup, find_packages BASE = os.path.dirname(__file__) -README_PATH = os.path.join(BASE, 'README.md') +with open(os.path.join(BASE, 'README.md'), encoding='utf-8') as fh: + long_description = fh.read() + +PLYVEL = [] +if sys.platform.startswith('linux'): + PLYVEL.append('plyvel==1.0.5') setup( name=__name__, @@ -12,7 +18,7 @@ setup( author_email="hello@lbry.com", url="https://lbry.com", description="A decentralized media library and marketplace", - long_description=open(README_PATH, encoding='utf-8').read(), + long_description=long_description, long_description_content_type="text/markdown", keywords="lbry protocol media", license='MIT', @@ -20,10 +26,12 @@ setup( 
packages=find_packages(exclude=('tests',)), zip_safe=False, entry_points={ - 'console_scripts': 'lbrynet=lbry.extras.cli:main' + 'console_scripts': [ + 'lbrynet=lbry.extras.cli:main', + 'torba-server=torba.server.cli:main', + ], }, install_requires=[ - 'torba', 'aiohttp==3.5.4', 'aioupnp==0.0.16', 'appdirs==1.4.3', @@ -40,5 +48,22 @@ setup( 'docopt==0.6.2', 'hachoir', 'multidict==4.6.1', + 'coincurve==11.0.0', + 'pbkdf2==1.3', + 'attrs==18.2.0', + 'pylru==1.1.0' + ] + PLYVEL, + classifiers=[ + 'Framework :: AsyncIO', + 'Intended Audience :: Developers', + 'Intended Audience :: System Administrators', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python :: 3', + 'Operating System :: OS Independent', + 'Topic :: Internet', + 'Topic :: Software Development :: Testing', + 'Topic :: Software Development :: Libraries :: Python Modules', + 'Topic :: System :: Distributed Computing', + 'Topic :: Utilities', ], ) diff --git a/torba/tests/client_tests/integration/test_blockchain_reorganization.py b/lbry/tests/integration/test_blockchain_reorganization.py similarity index 100% rename from torba/tests/client_tests/integration/test_blockchain_reorganization.py rename to lbry/tests/integration/test_blockchain_reorganization.py diff --git a/torba/tests/client_tests/integration/test_network.py b/lbry/tests/integration/test_network.py similarity index 100% rename from torba/tests/client_tests/integration/test_network.py rename to lbry/tests/integration/test_network.py diff --git a/torba/tests/client_tests/integration/test_sync.py b/lbry/tests/integration/test_sync.py similarity index 100% rename from torba/tests/client_tests/integration/test_sync.py rename to lbry/tests/integration/test_sync.py diff --git a/torba/tests/client_tests/integration/test_transactions.py b/lbry/tests/integration/test_transactions.py similarity index 100% rename from torba/tests/client_tests/integration/test_transactions.py rename to lbry/tests/integration/test_transactions.py diff --git a/torba/tests/client_tests/unit/key_fixtures.py b/lbry/tests/unit/wallet/key_fixtures.py similarity index 100% rename from torba/tests/client_tests/unit/key_fixtures.py rename to lbry/tests/unit/wallet/key_fixtures.py diff --git a/lbry/tests/unit/wallet/test_account.py b/lbry/tests/unit/wallet/test_account.py index 54766cda4..065d43ab9 100644 --- a/lbry/tests/unit/wallet/test_account.py +++ b/lbry/tests/unit/wallet/test_account.py @@ -40,6 +40,63 @@ class TestAccount(AsyncioTestCase): addresses = await account.change.get_addresses() self.assertEqual(len(addresses), 6) + async def test_generate_keys_over_batch_threshold_saves_it_properly(self): + async with self.account.receiving.address_generator_lock: + await self.account.receiving._generate_keys(0, 200) + records = await self.account.receiving.get_address_records() + self.assertEqual(len(records), 201) + + async def test_ensure_address_gap(self): + account = self.account + + self.assertIsInstance(account.receiving, HierarchicalDeterministic) + + async with account.receiving.address_generator_lock: + await account.receiving._generate_keys(4, 7) + await account.receiving._generate_keys(0, 3) + await account.receiving._generate_keys(8, 11) + records = await account.receiving.get_address_records() + self.assertListEqual( + [r['pubkey'].n for r in records], + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + ) + + # we have 12, but default gap is 20 + new_keys = await account.receiving.ensure_address_gap() + self.assertEqual(len(new_keys), 8) + records = await 
account.receiving.get_address_records() + self.assertListEqual( + [r['pubkey'].n for r in records], + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] + ) + + # case #1: no new addresses needed + empty = await account.receiving.ensure_address_gap() + self.assertEqual(len(empty), 0) + + # case #2: only one new addressed needed + records = await account.receiving.get_address_records() + await self.ledger.db.set_address_history(records[0]['address'], 'a:1:') + new_keys = await account.receiving.ensure_address_gap() + self.assertEqual(len(new_keys), 1) + + # case #3: 20 addresses needed + await self.ledger.db.set_address_history(new_keys[0], 'a:1:') + new_keys = await account.receiving.ensure_address_gap() + self.assertEqual(len(new_keys), 20) + + async def test_get_or_create_usable_address(self): + account = self.account + + keys = await account.receiving.get_addresses() + self.assertEqual(len(keys), 0) + + address = await account.receiving.get_or_create_usable_address() + self.assertIsNotNone(address) + + keys = await account.receiving.get_addresses() + self.assertEqual(len(keys), 20) + async def test_generate_account_from_seed(self): account = Account.from_dict( self.ledger, Wallet(), { @@ -74,7 +131,7 @@ class TestAccount(AsyncioTestCase): ) self.assertIsNone(private_key) - def test_load_and_save_account(self): + async def test_load_and_save_account(self): account_data = { 'name': 'Main Account', 'modified_on': 123.456, @@ -97,6 +154,14 @@ class TestAccount(AsyncioTestCase): } account = Account.from_dict(self.ledger, Wallet(), account_data) + + await account.ensure_address_gap() + + addresses = await account.receiving.get_addresses() + self.assertEqual(len(addresses), 5) + addresses = await account.change.get_addresses() + self.assertEqual(len(addresses), 5) + account_data['ledger'] = 'lbc_mainnet' self.assertDictEqual(account_data, account.to_dict()) @@ -116,3 +181,317 @@ class TestAccount(AsyncioTestCase): # doesn't fail for single-address account account2 = Account.generate(self.ledger, Wallet(), 'lbryum', {'name': 'single-address'}) await account2.save_max_gap() + + def test_merge_diff(self): + account_data = { + 'name': 'My Account', + 'modified_on': 123.456, + 'seed': + "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac" + "h absent", + 'encrypted': False, + 'private_key': + 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' + '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', + 'public_key': + 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' + 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', + 'address_generator': { + 'name': 'deterministic-chain', + 'receiving': {'gap': 5, 'maximum_uses_per_address': 2}, + 'change': {'gap': 5, 'maximum_uses_per_address': 2} + } + } + account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data) + + self.assertEqual(account.name, 'My Account') + self.assertEqual(account.modified_on, 123.456) + self.assertEqual(account.change.gap, 5) + self.assertEqual(account.change.maximum_uses_per_address, 2) + self.assertEqual(account.receiving.gap, 5) + self.assertEqual(account.receiving.maximum_uses_per_address, 2) + + account_data['name'] = 'Changed Name' + account_data['address_generator']['change']['gap'] = 6 + account_data['address_generator']['change']['maximum_uses_per_address'] = 7 + account_data['address_generator']['receiving']['gap'] = 8 + account_data['address_generator']['receiving']['maximum_uses_per_address'] = 9 + + 
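# merge() applies the incoming data only when its modified_on + # timestamp is newer than the account's own, so this first call is a no-op. +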
account.merge(account_data) + # no change because modified_on is not newer + self.assertEqual(account.name, 'My Account') + + account_data['modified_on'] = 200.00 + + account.merge(account_data) + self.assertEqual(account.name, 'Changed Name') + self.assertEqual(account.change.gap, 6) + self.assertEqual(account.change.maximum_uses_per_address, 7) + self.assertEqual(account.receiving.gap, 8) + self.assertEqual(account.receiving.maximum_uses_per_address, 9) + + +class TestSingleKeyAccount(AsyncioTestCase): + + async def asyncSetUp(self): + self.ledger = ledger_class({ + 'db': ledger_class.database_class(':memory:'), + 'headers': ledger_class.headers_class(':memory:'), + }) + await self.ledger.db.open() + self.account = self.ledger.account_class.generate( + self.ledger, Wallet(), "torba", {'name': 'single-address'}) + + async def asyncTearDown(self): + await self.ledger.db.close() + + async def test_generate_account(self): + account = self.account + + self.assertEqual(account.ledger, self.ledger) + self.assertIsNotNone(account.seed) + self.assertEqual(account.public_key.ledger, self.ledger) + self.assertEqual(account.private_key.public_key, account.public_key) + + addresses = await account.receiving.get_addresses() + self.assertEqual(len(addresses), 0) + addresses = await account.change.get_addresses() + self.assertEqual(len(addresses), 0) + + await account.ensure_address_gap() + + addresses = await account.receiving.get_addresses() + self.assertEqual(len(addresses), 1) + self.assertEqual(addresses[0], account.public_key.address) + addresses = await account.change.get_addresses() + self.assertEqual(len(addresses), 1) + self.assertEqual(addresses[0], account.public_key.address) + + addresses = await account.get_addresses() + self.assertEqual(len(addresses), 1) + self.assertEqual(addresses[0], account.public_key.address) + + async def test_ensure_address_gap(self): + account = self.account + + self.assertIsInstance(account.receiving, SingleKey) + addresses = await account.receiving.get_addresses() + self.assertListEqual(addresses, []) + + # we have 12, but default gap is 20 + new_keys = await account.receiving.ensure_address_gap() + self.assertEqual(len(new_keys), 1) + self.assertEqual(new_keys[0], account.public_key.address) + records = await account.receiving.get_address_records() + pubkey = records[0].pop('pubkey') + self.assertListEqual(records, [{ + 'chain': 0, + 'account': account.public_key.address, + 'address': account.public_key.address, + 'history': None, + 'used_times': 0 + }]) + self.assertEqual( + pubkey.extended_key_string(), + account.public_key.extended_key_string() + ) + + # case #1: no new addresses needed + empty = await account.receiving.ensure_address_gap() + self.assertEqual(len(empty), 0) + + # case #2: after use, still no new address needed + records = await account.receiving.get_address_records() + await self.ledger.db.set_address_history(records[0]['address'], 'a:1:') + empty = await account.receiving.ensure_address_gap() + self.assertEqual(len(empty), 0) + + async def test_get_or_create_usable_address(self): + account = self.account + + addresses = await account.receiving.get_addresses() + self.assertEqual(len(addresses), 0) + + address1 = await account.receiving.get_or_create_usable_address() + self.assertIsNotNone(address1) + + await self.ledger.db.set_address_history(address1, 'a:1:b:2:c:3:') + records = await account.receiving.get_address_records() + self.assertEqual(records[0]['used_times'], 3) + + address2 = await account.receiving.get_or_create_usable_address() 
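+ # A single-address account never rotates addresses: even after the + # address has recorded uses, the same one is handed out again.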
+ self.assertEqual(address1, address2) + + keys = await account.receiving.get_addresses() + self.assertEqual(len(keys), 1) + + async def test_generate_account_from_seed(self): + account = self.ledger.account_class.from_dict( + self.ledger, Wallet(), { + "seed": + "carbon smart garage balance margin twelve chest sword toas" + "t envelope bottom stomach absent", + 'address_generator': {'name': 'single-address'} + } + ) + self.assertEqual( + account.private_key.extended_key_string(), + 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' + '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', + ) + self.assertEqual( + account.public_key.extended_key_string(), + 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' + 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', + ) + address = await account.receiving.ensure_address_gap() + self.assertEqual(address[0], account.public_key.address) + + private_key = await self.ledger.get_private_key_for_address( + account.wallet, address[0] + ) + self.assertEqual( + private_key.extended_key_string(), + 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' + '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', + ) + + invalid_key = await self.ledger.get_private_key_for_address( + account.wallet, 'BcQjRlhDOIrQez1WHfz3whnB33Bp34sUgX' + ) + self.assertIsNone(invalid_key) + + self.assertEqual( + hexlify(private_key.wif()), + b'1c92caa0ef99bfd5e2ceb73b66da8cd726a9370be8c368d448a322f3c5b23aaab901' + ) + + async def test_load_and_save_account(self): + account_data = { + 'name': 'My Account', + 'modified_on': 123.456, + 'seed': + "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac" + "h absent", + 'encrypted': False, + 'private_key': + 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' + '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', + 'public_key': + 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' + 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', + 'address_generator': {'name': 'single-address'} + } + + account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data) + + await account.ensure_address_gap() + + addresses = await account.receiving.get_addresses() + self.assertEqual(len(addresses), 1) + addresses = await account.change.get_addresses() + self.assertEqual(len(addresses), 1) + + self.maxDiff = None + account_data['ledger'] = 'btc_mainnet' + self.assertDictEqual(account_data, account.to_dict()) + + +class AccountEncryptionTests(AsyncioTestCase): + password = "password" + init_vector = b'0000000000000000' + unencrypted_account = { + 'name': 'My Account', + 'seed': + "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac" + "h absent", + 'encrypted': False, + 'private_key': + 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' + '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', + 'public_key': + 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' + 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', + 'address_generator': {'name': 'single-address'} + } + encrypted_account = { + 'name': 'My Account', + 'seed': + "MDAwMDAwMDAwMDAwMDAwMJ4e4W4pE6nQtPiD6MujNIQ7aFPhUBl63GwPziAgGN" + "MBTMoaSjZfyyvw7ELMCqAYTWJ61aV7K4lmd2hR11g9dpdnnpCb9f9j3zLZHRv7+" + "bIkZ//trah9AIkmrc/ZvNkC0Q==", + 'encrypted': True, + 'private_key': + 'MDAwMDAwMDAwMDAwMDAwMLkWikOLScA/ZxlFSGU7dl//7Q/1gS9h7vqQyrd8DX+' + 'jwcp7SwlJ1mkMwuraUaWLq9/LxiaGmqJBUZ50p77YVZbDycaCN1unBr1/i1q6RP' + 
'Ob2MNCaG8nyjxZhQai+V/2JmJ+UnFMp3nHany7F8/Hr0g=', + 'public_key': + 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' + 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', + 'address_generator': {'name': 'single-address'} + } + + async def asyncSetUp(self): + self.ledger = ledger_class({ + 'db': ledger_class.database_class(':memory:'), + 'headers': ledger_class.headers_class(':memory:'), + }) + + def test_encrypt_wallet(self): + account = self.ledger.account_class.from_dict(self.ledger, Wallet(), self.unencrypted_account) + account.init_vectors = { + 'seed': self.init_vector, + 'private_key': self.init_vector + } + + self.assertFalse(account.encrypted) + self.assertIsNotNone(account.private_key) + account.encrypt(self.password) + self.assertTrue(account.encrypted) + self.assertEqual(account.seed, self.encrypted_account['seed']) + self.assertEqual(account.private_key_string, self.encrypted_account['private_key']) + self.assertIsNone(account.private_key) + + self.assertEqual(account.to_dict()['seed'], self.encrypted_account['seed']) + self.assertEqual(account.to_dict()['private_key'], self.encrypted_account['private_key']) + + account.decrypt(self.password) + self.assertEqual(account.init_vectors['private_key'], self.init_vector) + self.assertEqual(account.init_vectors['seed'], self.init_vector) + + self.assertEqual(account.seed, self.unencrypted_account['seed']) + self.assertEqual(account.private_key.extended_key_string(), self.unencrypted_account['private_key']) + + self.assertEqual(account.to_dict(encrypt_password=self.password)['seed'], self.encrypted_account['seed']) + self.assertEqual(account.to_dict(encrypt_password=self.password)['private_key'], self.encrypted_account['private_key']) + + self.assertFalse(account.encrypted) + + def test_decrypt_wallet(self): + account = self.ledger.account_class.from_dict(self.ledger, Wallet(), self.encrypted_account) + + self.assertTrue(account.encrypted) + account.decrypt(self.password) + self.assertEqual(account.init_vectors['private_key'], self.init_vector) + self.assertEqual(account.init_vectors['seed'], self.init_vector) + + self.assertFalse(account.encrypted) + + self.assertEqual(account.seed, self.unencrypted_account['seed']) + self.assertEqual(account.private_key.extended_key_string(), self.unencrypted_account['private_key']) + + self.assertEqual(account.to_dict(encrypt_password=self.password)['seed'], self.encrypted_account['seed']) + self.assertEqual(account.to_dict(encrypt_password=self.password)['private_key'], self.encrypted_account['private_key']) + self.assertEqual(account.to_dict()['seed'], self.unencrypted_account['seed']) + self.assertEqual(account.to_dict()['private_key'], self.unencrypted_account['private_key']) + + def test_encrypt_decrypt_read_only_account(self): + account_data = self.unencrypted_account.copy() + del account_data['seed'] + del account_data['private_key'] + account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data) + encrypted = account.to_dict('password') + self.assertFalse(encrypted['seed']) + self.assertFalse(encrypted['private_key']) + account.encrypt('password') + account.decrypt('password') diff --git a/torba/tests/client_tests/unit/test_bcd_data_stream.py b/lbry/tests/unit/wallet/test_bcd_data_stream.py similarity index 100% rename from torba/tests/client_tests/unit/test_bcd_data_stream.py rename to lbry/tests/unit/wallet/test_bcd_data_stream.py diff --git a/torba/tests/client_tests/unit/test_bip32.py b/lbry/tests/unit/wallet/test_bip32.py similarity index 100% 
rename from torba/tests/client_tests/unit/test_bip32.py rename to lbry/tests/unit/wallet/test_bip32.py diff --git a/torba/tests/client_tests/unit/test_coinselection.py b/lbry/tests/unit/wallet/test_coinselection.py similarity index 100% rename from torba/tests/client_tests/unit/test_coinselection.py rename to lbry/tests/unit/wallet/test_coinselection.py diff --git a/torba/tests/client_tests/unit/test_database.py b/lbry/tests/unit/wallet/test_database.py similarity index 100% rename from torba/tests/client_tests/unit/test_database.py rename to lbry/tests/unit/wallet/test_database.py diff --git a/torba/tests/client_tests/unit/test_hash.py b/lbry/tests/unit/wallet/test_hash.py similarity index 100% rename from torba/tests/client_tests/unit/test_hash.py rename to lbry/tests/unit/wallet/test_hash.py diff --git a/lbry/tests/unit/wallet/test_headers.py b/lbry/tests/unit/wallet/test_headers.py index 32fe40cbc..57358421a 100644 --- a/lbry/tests/unit/wallet/test_headers.py +++ b/lbry/tests/unit/wallet/test_headers.py @@ -1,3 +1,12 @@ +import asyncio +import os +import tempfile +from binascii import hexlify + +from torba.client.hash import sha256 +from torba.testcase import AsyncioTestCase + +from torba.coin.bitcoinsegwit import MainHeaders from binascii import unhexlify from torba.testcase import AsyncioTestCase @@ -6,6 +15,157 @@ from torba.client.util import ArithUint256 from lbry.wallet.ledger import Headers +def block_bytes(blocks): + return blocks * MainHeaders.header_size + + +class BitcoinHeadersTestCase(AsyncioTestCase): + HEADER_FILE = 'bitcoin_headers' + RETARGET_BLOCK = 32256 # difficulty: 1 -> 1.18 + + def setUp(self): + self.maxDiff = None + self.header_file_name = os.path.join(os.path.dirname(__file__), self.HEADER_FILE) + + def get_bytes(self, upto: int = -1, after: int = 0) -> bytes: + with open(self.header_file_name, 'rb') as headers: + headers.seek(after, os.SEEK_SET) + return headers.read(upto) + + async def get_headers(self, upto: int = -1): + h = MainHeaders(':memory:') + h.io.write(self.get_bytes(upto)) + return h + + +class BasicHeadersTests(BitcoinHeadersTestCase): + + async def test_serialization(self): + h = await self.get_headers() + self.assertDictEqual(h[0], { + 'bits': 486604799, + 'block_height': 0, + 'merkle_root': b'4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b', + 'nonce': 2083236893, + 'prev_block_hash': b'0000000000000000000000000000000000000000000000000000000000000000', + 'timestamp': 1231006505, + 'version': 1 + }) + self.assertDictEqual(h[self.RETARGET_BLOCK-1], { + 'bits': 486604799, + 'block_height': 32255, + 'merkle_root': b'89b4f223789e40b5b475af6483bb05bceda54059e17d2053334b358f6bb310ac', + 'nonce': 312762301, + 'prev_block_hash': b'000000006baebaa74cecde6c6787c26ee0a616a3c333261bff36653babdac149', + 'timestamp': 1262152739, + 'version': 1 + }) + self.assertDictEqual(h[self.RETARGET_BLOCK], { + 'bits': 486594666, + 'block_height': 32256, + 'merkle_root': b'64b5e5f5a262f47af443a0120609206a3305877693edfe03e994f20a024ab627', + 'nonce': 121087187, + 'prev_block_hash': b'00000000984f962134a7291e3693075ae03e521f0ee33378ec30a334d860034b', + 'timestamp': 1262153464, + 'version': 1 + }) + self.assertDictEqual(h[self.RETARGET_BLOCK+1], { + 'bits': 486594666, + 'block_height': 32257, + 'merkle_root': b'4d1488981f08b3037878193297dbac701a2054e0f803d4424fe6a4d763d62334', + 'nonce': 274675219, + 'prev_block_hash': b'000000004f2886a170adb7204cb0c7a824217dd24d11a74423d564c4e0904967', + 'timestamp': 1262154352, + 'version': 1 + }) + self.assertEqual( + 
h.serialize(h[0]), + h.get_raw_header(0) + ) + self.assertEqual( + h.serialize(h[self.RETARGET_BLOCK]), + h.get_raw_header(self.RETARGET_BLOCK) + ) + + async def test_connect_from_genesis_to_3000_past_first_chunk_at_2016(self): + headers = MainHeaders(':memory:') + self.assertEqual(headers.height, -1) + await headers.connect(0, self.get_bytes(block_bytes(3001))) + self.assertEqual(headers.height, 3000) + + async def test_connect_9_blocks_passing_a_retarget_at_32256(self): + retarget = block_bytes(self.RETARGET_BLOCK-5) + headers = await self.get_headers(upto=retarget) + remainder = self.get_bytes(after=retarget) + self.assertEqual(headers.height, 32250) + await headers.connect(len(headers), remainder) + self.assertEqual(headers.height, 32259) + + async def test_bounds(self): + headers = MainHeaders(':memory:') + await headers.connect(0, self.get_bytes(block_bytes(3001))) + self.assertEqual(headers.height, 3000) + with self.assertRaises(IndexError): + _ = headers[3001] + with self.assertRaises(IndexError): + _ = headers[-1] + self.assertIsNotNone(headers[3000]) + self.assertIsNotNone(headers[0]) + + async def test_repair(self): + headers = MainHeaders(':memory:') + await headers.connect(0, self.get_bytes(block_bytes(3001))) + self.assertEqual(headers.height, 3000) + await headers.repair() + self.assertEqual(headers.height, 3000) + # corrupt the middle of it + headers.io.seek(block_bytes(1500)) + headers.io.write(b"wtf") + await headers.repair() + self.assertEqual(headers.height, 1499) + self.assertEqual(len(headers), 1500) + # corrupt by appending + headers.io.seek(block_bytes(len(headers))) + headers.io.write(b"appending") + await headers.repair() + self.assertEqual(headers.height, 1499) + await headers.connect(len(headers), self.get_bytes(block_bytes(3001 - 1500), after=block_bytes(1500))) + self.assertEqual(headers.height, 3000) + + async def test_checkpointed_writer(self): + headers = MainHeaders(':memory:') + headers.checkpoint = 100, hexlify(sha256(self.get_bytes(block_bytes(100)))) + genblocks = lambda start, end: self.get_bytes(block_bytes(end - start), block_bytes(start)) + async with headers.checkpointed_connector() as buff: + buff.write(genblocks(0, 10)) + self.assertEqual(len(headers), 10) + async with headers.checkpointed_connector() as buff: + buff.write(genblocks(10, 100)) + self.assertEqual(len(headers), 100) + headers = MainHeaders(':memory:') + async with headers.checkpointed_connector() as buff: + buff.write(genblocks(0, 300)) + self.assertEqual(len(headers), 300) + + async def test_concurrency(self): + BLOCKS = 30 + headers_temporary_file = tempfile.mktemp() + headers = MainHeaders(headers_temporary_file) + await headers.open() + self.addCleanup(os.remove, headers_temporary_file) + async def writer(): + for block_index in range(BLOCKS): + await headers.connect(block_index, self.get_bytes(block_bytes(block_index + 1), block_bytes(block_index))) + async def reader(): + for block_index in range(BLOCKS): + while len(headers) < block_index: + await asyncio.sleep(0.000001) + assert headers[block_index]['block_height'] == block_index + reader_task = asyncio.create_task(reader()) + await writer() + await reader_task + + class TestHeaders(AsyncioTestCase): def test_deserialize(self): diff --git a/lbry/tests/unit/wallet/test_ledger.py b/lbry/tests/unit/wallet/test_ledger.py index 9e2a5eac9..47fb9edd0 100644 --- a/lbry/tests/unit/wallet/test_ledger.py +++ b/lbry/tests/unit/wallet/test_ledger.py @@ -1,3 +1,11 @@ +import os +from binascii import hexlify + +from 
torba.coin.bitcoinsegwit import MainNetLedger +from torba.client.wallet import Wallet + +from client_tests.unit.test_transaction import get_transaction, get_output +from client_tests.unit.test_headers import BitcoinHeadersTestCase, block_bytes from torba.testcase import AsyncioTestCase from torba.client.wallet import Wallet @@ -6,6 +14,32 @@ from lbry.wallet.transaction import Transaction, Output, Input from lbry.wallet.ledger import MainNetLedger +class MockNetwork: + + def __init__(self, history, transaction): + self.history = history + self.transaction = transaction + self.address = None + self.get_history_called = [] + self.get_transaction_called = [] + self.is_connected = False + + def retriable_call(self, function, *args, **kwargs): + return function(*args, **kwargs) + + async def get_history(self, address): + self.get_history_called.append(address) + self.address = address + return self.history + + async def get_merkle(self, txid, height): + return {'merkle': ['abcd01'], 'pos': 1} + + async def get_transaction(self, tx_hash, _=None): + self.get_transaction_called.append(tx_hash) + return self.transaction[tx_hash] + + class LedgerTestCase(AsyncioTestCase): async def asyncSetUp(self): @@ -19,6 +53,129 @@ class LedgerTestCase(AsyncioTestCase): async def asyncTearDown(self): await self.ledger.db.close() + def make_header(self, **kwargs): + header = { + 'bits': 486604799, + 'block_height': 0, + 'merkle_root': b'4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b', + 'nonce': 2083236893, + 'prev_block_hash': b'0000000000000000000000000000000000000000000000000000000000000000', + 'timestamp': 1231006505, + 'version': 1 + } + header.update(kwargs) + header['merkle_root'] = header['merkle_root'].ljust(64, b'a') + header['prev_block_hash'] = header['prev_block_hash'].ljust(64, b'0') + return self.ledger.headers.serialize(header) + + def add_header(self, **kwargs): + serialized = self.make_header(**kwargs) + self.ledger.headers.io.seek(0, os.SEEK_END) + self.ledger.headers.io.write(serialized) + self.ledger.headers._size = None + + +class TestSynchronization(LedgerTestCase): + + async def test_update_history(self): + account = self.ledger.account_class.generate(self.ledger, Wallet(), "torba") + address = await account.receiving.get_or_create_usable_address() + address_details = await self.ledger.db.get_address(address=address) + self.assertIsNone(address_details['history']) + + self.add_header(block_height=0, merkle_root=b'abcd04') + self.add_header(block_height=1, merkle_root=b'abcd04') + self.add_header(block_height=2, merkle_root=b'abcd04') + self.add_header(block_height=3, merkle_root=b'abcd04') + self.ledger.network = MockNetwork([ + {'tx_hash': 'abcd01', 'height': 0}, + {'tx_hash': 'abcd02', 'height': 1}, + {'tx_hash': 'abcd03', 'height': 2}, + ], { + 'abcd01': hexlify(get_transaction(get_output(1)).raw), + 'abcd02': hexlify(get_transaction(get_output(2)).raw), + 'abcd03': hexlify(get_transaction(get_output(3)).raw), + }) + await self.ledger.update_history(address, '') + self.assertListEqual(self.ledger.network.get_history_called, [address]) + self.assertListEqual(self.ledger.network.get_transaction_called, ['abcd01', 'abcd02', 'abcd03']) + + address_details = await self.ledger.db.get_address(address=address) + self.assertEqual( + address_details['history'], + '252bda9b22cc902ca2aa2de3548ee8baf06b8501ff7bfb3b0b7d980dbd1bf792:0:' + 'ab9c0654dd484ac20437030f2034e25dcb29fc507e84b91138f80adc3af738f9:1:' + 'a2ae3d1db3c727e7d696122cab39ee20a7f81856dab7019056dd539f38c548a0:2:' + ) + 
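
The history string asserted above is just each synced transaction's real txid and height joined as "txid:height:" pairs, in the order the server reported them. A minimal sketch of that encoding, assuming only the format visible in the assertions (compact_history is a hypothetical helper, not part of the ledger API):

    def compact_history(entries):
        # entries as returned by get_history: [{'tx_hash': ..., 'height': ...}, ...]
        return ''.join(f"{e['tx_hash']}:{e['height']}:" for e in entries)

    assert compact_history([
        {'tx_hash': 'deadbeef', 'height': 0},
        {'tx_hash': 'cafef00d', 'height': 1},
    ]) == 'deadbeef:0:cafef00d:1:'
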
+ self.ledger.network.get_history_called = [] + self.ledger.network.get_transaction_called = [] + await self.ledger.update_history(address, '') + self.assertListEqual(self.ledger.network.get_history_called, [address]) + self.assertListEqual(self.ledger.network.get_transaction_called, []) + + self.ledger.network.history.append({'tx_hash': 'abcd04', 'height': 3}) + self.ledger.network.transaction['abcd04'] = hexlify(get_transaction(get_output(4)).raw) + self.ledger.network.get_history_called = [] + self.ledger.network.get_transaction_called = [] + await self.ledger.update_history(address, '') + self.assertListEqual(self.ledger.network.get_history_called, [address]) + self.assertListEqual(self.ledger.network.get_transaction_called, ['abcd04']) + address_details = await self.ledger.db.get_address(address=address) + self.assertEqual( + address_details['history'], + '252bda9b22cc902ca2aa2de3548ee8baf06b8501ff7bfb3b0b7d980dbd1bf792:0:' + 'ab9c0654dd484ac20437030f2034e25dcb29fc507e84b91138f80adc3af738f9:1:' + 'a2ae3d1db3c727e7d696122cab39ee20a7f81856dab7019056dd539f38c548a0:2:' + '047cf1d53ef68f0fd586d46f90c09ff8e57a4180f67e7f4b8dd0135c3741e828:3:' + ) + + +class MocHeaderNetwork(MockNetwork): + def __init__(self, responses): + super().__init__(None, None) + self.responses = responses + + async def get_headers(self, height, blocks): + return self.responses[height] + + +class BlockchainReorganizationTests(LedgerTestCase): + + async def test_1_block_reorganization(self): + self.ledger.network = MocHeaderNetwork({ + 20: {'height': 20, 'count': 5, 'hex': hexlify( + self.get_bytes(after=block_bytes(20), upto=block_bytes(5)) + )}, + 25: {'height': 25, 'count': 0, 'hex': b''} + }) + headers = self.ledger.headers + await headers.connect(0, self.get_bytes(upto=block_bytes(20))) + self.add_header(block_height=len(headers)) + self.assertEqual(headers.height, 20) + await self.ledger.receive_header([{ + 'height': 21, 'hex': hexlify(self.make_header(block_height=21)) + }]) + + async def test_3_block_reorganization(self): + self.ledger.network = MocHeaderNetwork({ + 20: {'height': 20, 'count': 5, 'hex': hexlify( + self.get_bytes(after=block_bytes(20), upto=block_bytes(5)) + )}, + 21: {'height': 21, 'count': 1, 'hex': hexlify(self.make_header(block_height=21))}, + 22: {'height': 22, 'count': 1, 'hex': hexlify(self.make_header(block_height=22))}, + 25: {'height': 25, 'count': 0, 'hex': b''} + }) + headers = self.ledger.headers + await headers.connect(0, self.get_bytes(upto=block_bytes(20))) + self.add_header(block_height=len(headers)) + self.add_header(block_height=len(headers)) + self.add_header(block_height=len(headers)) + self.assertEqual(headers.height, 22) + await self.ledger.receive_header(({ + 'height': 23, 'hex': hexlify(self.make_header(block_height=23)) + },)) + class BasicAccountingTests(LedgerTestCase): diff --git a/torba/tests/client_tests/unit/test_mnemonic.py b/lbry/tests/unit/wallet/test_mnemonic.py similarity index 100% rename from torba/tests/client_tests/unit/test_mnemonic.py rename to lbry/tests/unit/wallet/test_mnemonic.py diff --git a/lbry/tests/unit/wallet/test_script.py b/lbry/tests/unit/wallet/test_script.py index 839925c87..0bf449ca3 100644 --- a/lbry/tests/unit/wallet/test_script.py +++ b/lbry/tests/unit/wallet/test_script.py @@ -2,6 +2,224 @@ import unittest from binascii import hexlify, unhexlify from lbry.wallet.script import OutputScript +import unittest +from binascii import hexlify, unhexlify + +from torba.client.bcd_data_stream import BCDataStream +from torba.client.basescript 
import Template, ParseError, tokenize, push_data +from torba.client.basescript import PUSH_SINGLE, PUSH_INTEGER, PUSH_MANY, OP_HASH160, OP_EQUAL +from torba.client.basescript import BaseInputScript, BaseOutputScript + + +def parse(opcodes, source): + template = Template('test', opcodes) + s = BCDataStream() + for t in source: + if isinstance(t, bytes): + s.write_many(push_data(t)) + elif isinstance(t, int): + s.write_uint8(t) + else: + raise ValueError() + s.reset() + return template.parse(tokenize(s)) + + +class TestScriptTemplates(unittest.TestCase): + + def test_push_data(self): + self.assertDictEqual(parse( + (PUSH_SINGLE('script_hash'),), + (b'abcdef',) + ), { + 'script_hash': b'abcdef' + } + ) + self.assertDictEqual(parse( + (PUSH_SINGLE('first'), PUSH_INTEGER('rating')), + (b'Satoshi', (1000).to_bytes(2, 'little')) + ), { + 'first': b'Satoshi', + 'rating': 1000, + } + ) + self.assertDictEqual(parse( + (OP_HASH160, PUSH_SINGLE('script_hash'), OP_EQUAL), + (OP_HASH160, b'abcdef', OP_EQUAL) + ), { + 'script_hash': b'abcdef' + } + ) + + def test_push_data_many(self): + self.assertDictEqual(parse( + (PUSH_MANY('names'),), + (b'amit',) + ), { + 'names': [b'amit'] + } + ) + self.assertDictEqual(parse( + (PUSH_MANY('names'),), + (b'jeremy', b'amit', b'victor') + ), { + 'names': [b'jeremy', b'amit', b'victor'] + } + ) + self.assertDictEqual(parse( + (OP_HASH160, PUSH_MANY('names'), OP_EQUAL), + (OP_HASH160, b'grin', b'jack', OP_EQUAL) + ), { + 'names': [b'grin', b'jack'] + } + ) + + def test_push_data_mixed(self): + self.assertDictEqual(parse( + (PUSH_SINGLE('CEO'), PUSH_MANY('Devs'), PUSH_SINGLE('CTO'), PUSH_SINGLE('State')), + (b'jeremy', b'lex', b'amit', b'victor', b'jack', b'grin', b'NH') + ), { + 'CEO': b'jeremy', + 'CTO': b'grin', + 'Devs': [b'lex', b'amit', b'victor', b'jack'], + 'State': b'NH' + } + ) + + def test_push_data_many_separated(self): + self.assertDictEqual(parse( + (PUSH_MANY('Chiefs'), OP_HASH160, PUSH_MANY('Devs')), + (b'jeremy', b'grin', OP_HASH160, b'lex', b'jack') + ), { + 'Chiefs': [b'jeremy', b'grin'], + 'Devs': [b'lex', b'jack'] + } + ) + + def test_push_data_many_not_separated(self): + with self.assertRaisesRegex(ParseError, 'consecutive PUSH_MANY'): + parse((PUSH_MANY('Chiefs'), PUSH_MANY('Devs')), (b'jeremy', b'grin', b'lex', b'jack')) + + +class TestRedeemPubKeyHash(unittest.TestCase): + + def redeem_pubkey_hash(self, sig, pubkey): + # this checks that factory function correctly sets up the script + src1 = BaseInputScript.redeem_pubkey_hash(unhexlify(sig), unhexlify(pubkey)) + self.assertEqual(src1.template.name, 'pubkey_hash') + self.assertEqual(hexlify(src1.values['signature']), sig) + self.assertEqual(hexlify(src1.values['pubkey']), pubkey) + # now we test that it will round trip + src2 = BaseInputScript(src1.source) + self.assertEqual(src2.template.name, 'pubkey_hash') + self.assertEqual(hexlify(src2.values['signature']), sig) + self.assertEqual(hexlify(src2.values['pubkey']), pubkey) + return hexlify(src1.source) + + def test_redeem_pubkey_hash_1(self): + self.assertEqual( + self.redeem_pubkey_hash( + b'30450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e' + b'02dc5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a8301', + b'025415a06514230521bff3aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b' + ), + b'4830450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e02d' + b'c5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a830121025415a06514230521bff3' + 
b'aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b' + ) + + +class TestRedeemScriptHash(unittest.TestCase): + + def redeem_script_hash(self, sigs, pubkeys): + # this checks that factory function correctly sets up the script + src1 = BaseInputScript.redeem_script_hash( + [unhexlify(sig) for sig in sigs], + [unhexlify(pubkey) for pubkey in pubkeys] + ) + subscript1 = src1.values['script'] + self.assertEqual(src1.template.name, 'script_hash') + self.assertListEqual([hexlify(v) for v in src1.values['signatures']], sigs) + self.assertListEqual([hexlify(p) for p in subscript1.values['pubkeys']], pubkeys) + self.assertEqual(subscript1.values['signatures_count'], len(sigs)) + self.assertEqual(subscript1.values['pubkeys_count'], len(pubkeys)) + # now we test that it will round trip + src2 = BaseInputScript(src1.source) + subscript2 = src2.values['script'] + self.assertEqual(src2.template.name, 'script_hash') + self.assertListEqual([hexlify(v) for v in src2.values['signatures']], sigs) + self.assertListEqual([hexlify(p) for p in subscript2.values['pubkeys']], pubkeys) + self.assertEqual(subscript2.values['signatures_count'], len(sigs)) + self.assertEqual(subscript2.values['pubkeys_count'], len(pubkeys)) + return hexlify(src1.source) + + def test_redeem_script_hash_1(self): + self.assertEqual( + self.redeem_script_hash([ + b'3045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575' + b'e40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401', + b'3044022024890462f731bd1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac68' + b'9e35c4648e6beff1d42490207ba14027a638a62663b2ee40153299141eb01', + b'30450221009910823e0142967a73c2d16c1560054d71c0625a385904ba2f1f53e0bc1daa8d02205cd' + b'70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc318777a01' + ], [ + b'0372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a4', + b'03061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb7692', + b'02463bfbc1eaec74b5c21c09239ae18dbf6fc07833917df10d0b43e322810cee0c', + b'02fa6a6455c26fb516cfa85ea8de81dd623a893ffd579ee2a00deb6cdf3633d6bb', + b'0382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171ad0abeaa89' + ]), + b'00483045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575e' + b'40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401473044022024890462f731bd' + b'1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac689e35c4648e6beff1d42490207ba' + b'14027a638a62663b2ee40153299141eb014830450221009910823e0142967a73c2d16c1560054d71c0625a' + b'385904ba2f1f53e0bc1daa8d02205cd70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc3' + b'18777a014cad53210372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a42103' + b'061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb76922102463bfbc1eaec74b5c2' + b'1c09239ae18dbf6fc07833917df10d0b43e322810cee0c2102fa6a6455c26fb516cfa85ea8de81dd623a89' + b'3ffd579ee2a00deb6cdf3633d6bb210382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171' + b'ad0abeaa8955ae' + ) + + +class TestPayPubKeyHash(unittest.TestCase): + + def pay_pubkey_hash(self, pubkey_hash): + # this checks that factory function correctly sets up the script + src1 = BaseOutputScript.pay_pubkey_hash(unhexlify(pubkey_hash)) + self.assertEqual(src1.template.name, 'pay_pubkey_hash') + self.assertEqual(hexlify(src1.values['pubkey_hash']), pubkey_hash) + # now we test that it will round trip + src2 = BaseOutputScript(src1.source) + self.assertEqual(src2.template.name, 'pay_pubkey_hash') + 
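
The pay_pubkey_hash expectation below is the standard P2PKH layout: OP_DUP OP_HASH160, a 20-byte push of the pubkey hash, then OP_EQUALVERIFY OP_CHECKSIG. A sketch decoding that expected literal byte by byte (the opcode values are standard Bitcoin script constants, not read from basescript):

    from binascii import unhexlify

    script = unhexlify(b'76a91464d74d12acc93ba1ad495e8d2d0523252d664f4d88ac')
    assert script[0] == 0x76 and script[1] == 0xa9    # OP_DUP, OP_HASH160
    assert script[2] == 20                            # push the next 20 bytes
    pubkey_hash = script[3:23]                        # hash160 of the public key
    assert script[23] == 0x88 and script[24] == 0xac  # OP_EQUALVERIFY, OP_CHECKSIG
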
self.assertEqual(hexlify(src2.values['pubkey_hash']), pubkey_hash) + return hexlify(src1.source) + + def test_pay_pubkey_hash_1(self): + self.assertEqual( + self.pay_pubkey_hash(b'64d74d12acc93ba1ad495e8d2d0523252d664f4d'), + b'76a91464d74d12acc93ba1ad495e8d2d0523252d664f4d88ac' + ) + + +class TestPayScriptHash(unittest.TestCase): + + def pay_script_hash(self, script_hash): + # this checks that factory function correctly sets up the script + src1 = BaseOutputScript.pay_script_hash(unhexlify(script_hash)) + self.assertEqual(src1.template.name, 'pay_script_hash') + self.assertEqual(hexlify(src1.values['script_hash']), script_hash) + # now we test that it will round trip + src2 = BaseOutputScript(src1.source) + self.assertEqual(src2.template.name, 'pay_script_hash') + self.assertEqual(hexlify(src2.values['script_hash']), script_hash) + return hexlify(src1.source) + + def test_pay_pubkey_hash_1(self): + self.assertEqual( + self.pay_script_hash(b'63d65a2ee8c44426d06050cfd71c0f0ff3fc41ac'), + b'a91463d65a2ee8c44426d06050cfd71c0f0ff3fc41ac87' + ) class TestPayClaimNamePubkeyHash(unittest.TestCase): diff --git a/torba/tests/client_tests/unit/test_stream_controller.py b/lbry/tests/unit/wallet/test_stream_controller.py similarity index 100% rename from torba/tests/client_tests/unit/test_stream_controller.py rename to lbry/tests/unit/wallet/test_stream_controller.py diff --git a/lbry/tests/unit/wallet/test_transaction.py b/lbry/tests/unit/wallet/test_transaction.py index d3e4503cb..03e0163e7 100644 --- a/lbry/tests/unit/wallet/test_transaction.py +++ b/lbry/tests/unit/wallet/test_transaction.py @@ -81,6 +81,50 @@ class TestSizeAndFeeEstimation(AsyncioTestCase): self.assertEqual(tx.get_base_fee(self.ledger), FEE_PER_BYTE * tx.base_size) +class TestAccountBalanceImpactFromTransaction(unittest.TestCase): + + def test_is_my_account_not_set(self): + tx = get_transaction() + with self.assertRaisesRegex(ValueError, "Cannot access net_account_balance"): + _ = tx.net_account_balance + tx.inputs[0].txo_ref.txo.is_my_account = True + with self.assertRaisesRegex(ValueError, "Cannot access net_account_balance"): + _ = tx.net_account_balance + tx.outputs[0].is_my_account = True + # all inputs/outputs are set now so it should work + _ = tx.net_account_balance + + def test_paying_from_my_account_to_other_account(self): + tx = ledger_class.transaction_class() \ + .add_inputs([get_input(300*CENT)]) \ + .add_outputs([get_output(190*CENT, NULL_HASH), + get_output(100*CENT, NULL_HASH)]) + tx.inputs[0].txo_ref.txo.is_my_account = True + tx.outputs[0].is_my_account = False + tx.outputs[1].is_my_account = True + self.assertEqual(tx.net_account_balance, -200*CENT) + + def test_paying_from_other_account_to_my_account(self): + tx = ledger_class.transaction_class() \ + .add_inputs([get_input(300*CENT)]) \ + .add_outputs([get_output(190*CENT, NULL_HASH), + get_output(100*CENT, NULL_HASH)]) + tx.inputs[0].txo_ref.txo.is_my_account = False + tx.outputs[0].is_my_account = True + tx.outputs[1].is_my_account = False + self.assertEqual(tx.net_account_balance, 190*CENT) + + def test_paying_from_my_account_to_my_account(self): + tx = ledger_class.transaction_class() \ + .add_inputs([get_input(300*CENT)]) \ + .add_outputs([get_output(190*CENT, NULL_HASH), + get_output(100*CENT, NULL_HASH)]) + tx.inputs[0].txo_ref.txo.is_my_account = True + tx.outputs[0].is_my_account = True + tx.outputs[1].is_my_account = True + self.assertEqual(tx.net_account_balance, -10*CENT) # lost to fee + + class TestTransactionSerialization(unittest.TestCase): def 
test_genesis_transaction(self): @@ -254,3 +298,125 @@ class TestTransactionSigning(AsyncioTestCase): b'304402200dafa26ad7cf38c5a971c8a25ce7d85a076235f146126762296b1223c42ae21e022020ef9eeb8' b'398327891008c5c0be4357683f12cb22346691ff23914f457bf679601' ) + + +class TransactionIOBalancing(AsyncioTestCase): + + async def asyncSetUp(self): + self.ledger = ledger_class({ + 'db': ledger_class.database_class(':memory:'), + 'headers': ledger_class.headers_class(':memory:'), + }) + await self.ledger.db.open() + self.account = self.ledger.account_class.from_dict( + self.ledger, Wallet(), { + "seed": "carbon smart garage balance margin twelve chest sword " + "toast envelope bottom stomach absent" + } + ) + + addresses = await self.account.ensure_address_gap() + self.pubkey_hash = [self.ledger.address_to_hash160(a) for a in addresses] + self.hash_cycler = cycle(self.pubkey_hash) + + async def asyncTearDown(self): + await self.ledger.db.close() + + def txo(self, amount, address=None): + return get_output(int(amount*COIN), address or next(self.hash_cycler)) + + def txi(self, txo): + return ledger_class.transaction_class.input_class.spend(txo) + + def tx(self, inputs, outputs): + return ledger_class.transaction_class.create(inputs, outputs, [self.account], self.account) + + async def create_utxos(self, amounts): + utxos = [self.txo(amount) for amount in amounts] + + self.funding_tx = ledger_class.transaction_class(is_verified=True) \ + .add_inputs([self.txi(self.txo(sum(amounts)+0.1))]) \ + .add_outputs(utxos) + + await self.ledger.db.insert_transaction(self.funding_tx) + + for utxo in utxos: + await self.ledger.db.save_transaction_io( + self.funding_tx, + self.ledger.hash160_to_address(utxo.script.values['pubkey_hash']), + utxo.script.values['pubkey_hash'], '' + ) + + return utxos + + @staticmethod + def inputs(tx): + return [round(i.amount/COIN, 2) for i in tx.inputs] + + @staticmethod + def outputs(tx): + return [round(o.amount/COIN, 2) for o in tx.outputs] + + async def test_basic_use_cases(self): + self.ledger.fee_per_byte = int(.01*CENT) + + # available UTXOs for filling missing inputs + utxos = await self.create_utxos([ + 1, 1, 3, 5, 10 + ]) + + # pay 3 coins (3.02 w/ fees) + tx = await self.tx( + [], # inputs + [self.txo(3)] # outputs + ) + # best UTXO match is 5 (as UTXO 3 will be short 0.02 to cover fees) + self.assertListEqual(self.inputs(tx), [5]) + # a change of 1.98 is added to reach balance + self.assertListEqual(self.outputs(tx), [3, 1.98]) + + await self.ledger.release_outputs(utxos) + + # pay 2.98 coins (3.00 w/ fees) + tx = await self.tx( + [], # inputs + [self.txo(2.98)] # outputs + ) + # best UTXO match is 3 and no change is needed + self.assertListEqual(self.inputs(tx), [3]) + self.assertListEqual(self.outputs(tx), [2.98]) + + await self.ledger.release_outputs(utxos) + + # supplied input and output, but input is not enough to cover output + tx = await self.tx( + [self.txi(self.txo(10))], # inputs + [self.txo(11)] # outputs + ) + # additional input is chosen (UTXO 3) + self.assertListEqual([10, 3], self.inputs(tx)) + # change is now needed to consume extra input + self.assertListEqual([11, 1.96], self.outputs(tx)) + + await self.ledger.release_outputs(utxos) + + # liquidating a UTXO + tx = await self.tx( + [self.txi(self.txo(10))], # inputs + [] # outputs + ) + self.assertListEqual([10], self.inputs(tx)) + # missing change added to consume the amount + self.assertListEqual([9.98], self.outputs(tx)) + + await self.ledger.release_outputs(utxos) + + # liquidating at a loss, requires 
adding extra inputs + tx = await self.tx( + [self.txi(self.txo(0.01))], # inputs + [] # outputs + ) + # UTXO 1 is added to cover some of the fee + self.assertListEqual([0.01, 1], self.inputs(tx)) + # change is now needed to consume extra input + self.assertListEqual([0.97], self.outputs(tx)) diff --git a/torba/tests/client_tests/unit/test_utils.py b/lbry/tests/unit/wallet/test_utils.py similarity index 100% rename from torba/tests/client_tests/unit/test_utils.py rename to lbry/tests/unit/wallet/test_utils.py diff --git a/torba/tests/client_tests/unit/test_wallet.py b/lbry/tests/unit/wallet/test_wallet.py similarity index 100% rename from torba/tests/client_tests/unit/test_wallet.py rename to lbry/tests/unit/wallet/test_wallet.py diff --git a/torba/.gitignore b/torba/.gitignore deleted file mode 100644 index 757aa9b42..000000000 --- a/torba/.gitignore +++ /dev/null @@ -1,20 +0,0 @@ -# packaging -torba.egg-info/ -dist/ - -# PyCharm -.idea/ - -# testing -.tox/ -torba/bin - -# cache and logs -__pycache__/ -.mypy_cache/ -_trial_temp/ -_trial_temp-*/ - -# OS X DS_Store -*.DS_Store - diff --git a/torba/CHANGELOG.md b/torba/CHANGELOG.md deleted file mode 100644 index e69de29bb..000000000 diff --git a/torba/LICENSE b/torba/LICENSE deleted file mode 100644 index 53ed0f582..000000000 --- a/torba/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2018 LBRY Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/torba/README.md b/torba/README.md deleted file mode 100644 index 3c300aeb3..000000000 --- a/torba/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Torba Torba [![Build Status](https://travis-ci.org/lbryio/torba.svg?branch=master)](https://travis-ci.org/lbryio/torba) - -A new wallet library to help bitcoin based projects build fast, correct and scalable crypto currency wallets in Python. 
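
The amounts asserted in the TransactionIOBalancing cases above all follow one conservation rule: inputs equal outputs plus a size-based fee at the fee_per_byte the test sets. A rough sketch of that arithmetic for the first case (the ~200-byte size is inferred from the asserted numbers, not read from the ledger):

    COIN, CENT = 100_000_000, 1_000_000
    fee_per_byte = int(0.01 * CENT)      # 10_000, as set in test_basic_use_cases
    # paying 3 coins from the chosen 5-coin UTXO leaves 1.98 change, so the
    # implied fee is 5 - 3 - 1.98 = 0.02 coin, i.e. 2 CENT:
    fee = 500 * CENT - 300 * CENT - 198 * CENT
    assert fee == 2 * CENT
    assert fee // fee_per_byte == 200    # ~200 bytes of serialized transaction
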
diff --git a/torba/setup.py b/torba/setup.py deleted file mode 100644 index 6f54f6390..000000000 --- a/torba/setup.py +++ /dev/null @@ -1,68 +0,0 @@ -import os -import sys -from setuptools import setup, find_packages - -import torba - -BASE = os.path.dirname(__file__) -with open(os.path.join(BASE, 'README.md'), encoding='utf-8') as fh: - long_description = fh.read() - -REQUIRES = [ - 'aiohttp==3.5.4', - 'cffi==1.12.1', # TODO: 1.12.2 fails on travis in wine - 'coincurve==11.0.0', - 'pbkdf2==1.3', - 'cryptography==2.5', - 'attrs==18.2.0', - 'pylru==1.1.0' -] -if sys.platform.startswith('linux'): - REQUIRES.append('plyvel==1.0.5') - - -setup( - name='torba', - version=torba.__version__, - url='https://github.com/lbryio/torba', - license='MIT', - author='LBRY Inc.', - author_email='hello@lbry.io', - description='Wallet client/server framework for bitcoin based currencies.', - long_description=long_description, - long_description_content_type="text/markdown", - keywords='wallet,crypto,currency,money,bitcoin,electrum,electrumx', - classifiers=[ - 'Framework :: AsyncIO', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 3', - 'Operating System :: OS Independent', - 'Topic :: Internet', - 'Topic :: Software Development :: Testing', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Topic :: System :: Benchmark', - 'Topic :: System :: Distributed Computing', - 'Topic :: Utilities', - ], - packages=find_packages(exclude=('tests',)), - python_requires='>=3.7', - install_requires=REQUIRES, - extras_require={ - 'gui': ( - 'pyside2', - ) - }, - entry_points={ - 'console_scripts': [ - 'torba-client=torba.client.cli:main', - 'torba-server=torba.server.cli:main', - 'orchstr8=torba.orchstr8.cli:main', - ], - 'gui_scripts': [ - 'torba=torba.ui:main [gui]', - 'torba-workbench=torba.workbench:main [gui]', - ] - } -) diff --git a/torba/tests/client_tests/unit/__init__.py b/torba/tests/client_tests/unit/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/torba/tests/client_tests/unit/bitcoin_headers b/torba/tests/client_tests/unit/bitcoin_headers deleted file mode 100644 index 5cb342901..000000000 Binary files a/torba/tests/client_tests/unit/bitcoin_headers and /dev/null differ diff --git a/torba/tests/client_tests/unit/test_account.py b/torba/tests/client_tests/unit/test_account.py deleted file mode 100644 index 0987edc82..000000000 --- a/torba/tests/client_tests/unit/test_account.py +++ /dev/null @@ -1,493 +0,0 @@ -from binascii import hexlify - -from torba.testcase import AsyncioTestCase - -from torba.coin.bitcoinsegwit import MainNetLedger as ledger_class -from torba.client.baseaccount import HierarchicalDeterministic, SingleKey -from torba.client.wallet import Wallet - - -class TestHierarchicalDeterministicAccount(AsyncioTestCase): - - async def asyncSetUp(self): - self.ledger = ledger_class({ - 'db': ledger_class.database_class(':memory:'), - 'headers': ledger_class.headers_class(':memory:'), - }) - await self.ledger.db.open() - self.account = self.ledger.account_class.generate(self.ledger, Wallet(), "torba") - - async def asyncTearDown(self): - await self.ledger.db.close() - - async def test_generate_account(self): - account = self.account - - self.assertEqual(account.ledger, self.ledger) - self.assertIsNotNone(account.seed) - self.assertEqual(account.public_key.ledger, self.ledger) - self.assertEqual(account.private_key.public_key, 
account.public_key) - - addresses = await account.receiving.get_addresses() - self.assertEqual(len(addresses), 0) - addresses = await account.change.get_addresses() - self.assertEqual(len(addresses), 0) - - await account.ensure_address_gap() - - addresses = await account.receiving.get_addresses() - self.assertEqual(len(addresses), 20) - addresses = await account.change.get_addresses() - self.assertEqual(len(addresses), 6) - - addresses = await account.get_addresses() - self.assertEqual(len(addresses), 26) - - async def test_generate_keys_over_batch_threshold_saves_it_properly(self): - async with self.account.receiving.address_generator_lock: - await self.account.receiving._generate_keys(0, 200) - records = await self.account.receiving.get_address_records() - self.assertEqual(len(records), 201) - - async def test_ensure_address_gap(self): - account = self.account - - self.assertIsInstance(account.receiving, HierarchicalDeterministic) - - async with account.receiving.address_generator_lock: - await account.receiving._generate_keys(4, 7) - await account.receiving._generate_keys(0, 3) - await account.receiving._generate_keys(8, 11) - records = await account.receiving.get_address_records() - self.assertListEqual( - [r['pubkey'].n for r in records], - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] - ) - - # we have 12, but default gap is 20 - new_keys = await account.receiving.ensure_address_gap() - self.assertEqual(len(new_keys), 8) - records = await account.receiving.get_address_records() - self.assertListEqual( - [r['pubkey'].n for r in records], - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] - ) - - # case #1: no new addresses needed - empty = await account.receiving.ensure_address_gap() - self.assertEqual(len(empty), 0) - - # case #2: only one new addressed needed - records = await account.receiving.get_address_records() - await self.ledger.db.set_address_history(records[0]['address'], 'a:1:') - new_keys = await account.receiving.ensure_address_gap() - self.assertEqual(len(new_keys), 1) - - # case #3: 20 addresses needed - await self.ledger.db.set_address_history(new_keys[0], 'a:1:') - new_keys = await account.receiving.ensure_address_gap() - self.assertEqual(len(new_keys), 20) - - async def test_get_or_create_usable_address(self): - account = self.account - - keys = await account.receiving.get_addresses() - self.assertEqual(len(keys), 0) - - address = await account.receiving.get_or_create_usable_address() - self.assertIsNotNone(address) - - keys = await account.receiving.get_addresses() - self.assertEqual(len(keys), 20) - - async def test_generate_account_from_seed(self): - account = self.ledger.account_class.from_dict( - self.ledger, Wallet(), { - "seed": "carbon smart garage balance margin twelve chest sword " - "toast envelope bottom stomach absent", - "address_generator": { - 'name': 'deterministic-chain', - 'receiving': {'gap': 3, 'maximum_uses_per_address': 1}, - 'change': {'gap': 2, 'maximum_uses_per_address': 1} - } - } - ) - self.assertEqual( - account.private_key.extended_key_string(), - 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp5BxK' - 'Kfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna' - ) - self.assertEqual( - account.public_key.extended_key_string(), - 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7UbpV' - 'NzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g' - ) - address = await account.receiving.ensure_address_gap() - self.assertEqual(address[0], '1CDLuMfwmPqJiNk5C2Bvew6tpgjAGgUk8J') - - private_key = await 
self.ledger.get_private_key_for_address( - account.wallet, '1CDLuMfwmPqJiNk5C2Bvew6tpgjAGgUk8J' - ) - self.assertEqual( - private_key.extended_key_string(), - 'xprv9xV7rhbg6M4yWrdTeLorz3Q1GrQb4aQzzGWboP3du7W7UUztzNTUrEYTnDfz7o' - 'ptBygDxXYRppyiuenJpoBTgYP2C26E1Ah5FEALM24CsWi' - ) - - invalid_key = await self.ledger.get_private_key_for_address( - account.wallet, 'BcQjRlhDOIrQez1WHfz3whnB33Bp34sUgX' - ) - self.assertIsNone(invalid_key) - - self.assertEqual( - hexlify(private_key.wif()), - b'1c01ae1e4c7d89e39f6d3aa7792c097a30ca7d40be249b6de52c81ec8cf9aab48b01' - ) - - async def test_load_and_save_account(self): - account_data = { - 'name': 'My Account', - 'modified_on': 123.456, - 'seed': - "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac" - "h absent", - 'encrypted': False, - 'private_key': - 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' - '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', - 'public_key': - 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' - 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', - 'address_generator': { - 'name': 'deterministic-chain', - 'receiving': {'gap': 5, 'maximum_uses_per_address': 2}, - 'change': {'gap': 5, 'maximum_uses_per_address': 2} - } - } - - account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data) - - await account.ensure_address_gap() - - addresses = await account.receiving.get_addresses() - self.assertEqual(len(addresses), 5) - addresses = await account.change.get_addresses() - self.assertEqual(len(addresses), 5) - - self.maxDiff = None - account_data['ledger'] = 'btc_mainnet' - self.assertDictEqual(account_data, account.to_dict()) - - def test_merge_diff(self): - account_data = { - 'name': 'My Account', - 'modified_on': 123.456, - 'seed': - "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac" - "h absent", - 'encrypted': False, - 'private_key': - 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' - '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', - 'public_key': - 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' - 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', - 'address_generator': { - 'name': 'deterministic-chain', - 'receiving': {'gap': 5, 'maximum_uses_per_address': 2}, - 'change': {'gap': 5, 'maximum_uses_per_address': 2} - } - } - account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data) - - self.assertEqual(account.name, 'My Account') - self.assertEqual(account.modified_on, 123.456) - self.assertEqual(account.change.gap, 5) - self.assertEqual(account.change.maximum_uses_per_address, 2) - self.assertEqual(account.receiving.gap, 5) - self.assertEqual(account.receiving.maximum_uses_per_address, 2) - - account_data['name'] = 'Changed Name' - account_data['address_generator']['change']['gap'] = 6 - account_data['address_generator']['change']['maximum_uses_per_address'] = 7 - account_data['address_generator']['receiving']['gap'] = 8 - account_data['address_generator']['receiving']['maximum_uses_per_address'] = 9 - - account.merge(account_data) - # no change because modified_on is not newer - self.assertEqual(account.name, 'My Account') - - account_data['modified_on'] = 200.00 - - account.merge(account_data) - self.assertEqual(account.name, 'Changed Name') - self.assertEqual(account.change.gap, 6) - self.assertEqual(account.change.maximum_uses_per_address, 7) - self.assertEqual(account.receiving.gap, 8) - 
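
test_merge_diff above pins down a last-write-wins rule: merged account data is only applied when its modified_on is newer than the account's. A minimal sketch of that rule on plain dicts (merge here is illustrative, not the real Account.merge):

    def merge(current: dict, incoming: dict) -> dict:
        # only accept the incoming data if it is strictly newer
        if incoming.get('modified_on', 0.0) > current.get('modified_on', 0.0):
            current.update(incoming)
        return current

    acct = {'name': 'My Account', 'modified_on': 123.456}
    merge(acct, {'name': 'Changed Name', 'modified_on': 100.0})
    assert acct['name'] == 'My Account'     # stale update ignored
    merge(acct, {'name': 'Changed Name', 'modified_on': 200.0})
    assert acct['name'] == 'Changed Name'   # newer update applied
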
self.assertEqual(account.receiving.maximum_uses_per_address, 9) - - -class TestSingleKeyAccount(AsyncioTestCase): - - async def asyncSetUp(self): - self.ledger = ledger_class({ - 'db': ledger_class.database_class(':memory:'), - 'headers': ledger_class.headers_class(':memory:'), - }) - await self.ledger.db.open() - self.account = self.ledger.account_class.generate( - self.ledger, Wallet(), "torba", {'name': 'single-address'}) - - async def asyncTearDown(self): - await self.ledger.db.close() - - async def test_generate_account(self): - account = self.account - - self.assertEqual(account.ledger, self.ledger) - self.assertIsNotNone(account.seed) - self.assertEqual(account.public_key.ledger, self.ledger) - self.assertEqual(account.private_key.public_key, account.public_key) - - addresses = await account.receiving.get_addresses() - self.assertEqual(len(addresses), 0) - addresses = await account.change.get_addresses() - self.assertEqual(len(addresses), 0) - - await account.ensure_address_gap() - - addresses = await account.receiving.get_addresses() - self.assertEqual(len(addresses), 1) - self.assertEqual(addresses[0], account.public_key.address) - addresses = await account.change.get_addresses() - self.assertEqual(len(addresses), 1) - self.assertEqual(addresses[0], account.public_key.address) - - addresses = await account.get_addresses() - self.assertEqual(len(addresses), 1) - self.assertEqual(addresses[0], account.public_key.address) - - async def test_ensure_address_gap(self): - account = self.account - - self.assertIsInstance(account.receiving, SingleKey) - addresses = await account.receiving.get_addresses() - self.assertListEqual(addresses, []) - - # we have 12, but default gap is 20 - new_keys = await account.receiving.ensure_address_gap() - self.assertEqual(len(new_keys), 1) - self.assertEqual(new_keys[0], account.public_key.address) - records = await account.receiving.get_address_records() - pubkey = records[0].pop('pubkey') - self.assertListEqual(records, [{ - 'chain': 0, - 'account': account.public_key.address, - 'address': account.public_key.address, - 'history': None, - 'used_times': 0 - }]) - self.assertEqual( - pubkey.extended_key_string(), - account.public_key.extended_key_string() - ) - - # case #1: no new addresses needed - empty = await account.receiving.ensure_address_gap() - self.assertEqual(len(empty), 0) - - # case #2: after use, still no new address needed - records = await account.receiving.get_address_records() - await self.ledger.db.set_address_history(records[0]['address'], 'a:1:') - empty = await account.receiving.ensure_address_gap() - self.assertEqual(len(empty), 0) - - async def test_get_or_create_usable_address(self): - account = self.account - - addresses = await account.receiving.get_addresses() - self.assertEqual(len(addresses), 0) - - address1 = await account.receiving.get_or_create_usable_address() - self.assertIsNotNone(address1) - - await self.ledger.db.set_address_history(address1, 'a:1:b:2:c:3:') - records = await account.receiving.get_address_records() - self.assertEqual(records[0]['used_times'], 3) - - address2 = await account.receiving.get_or_create_usable_address() - self.assertEqual(address1, address2) - - keys = await account.receiving.get_addresses() - self.assertEqual(len(keys), 1) - - async def test_generate_account_from_seed(self): - account = self.ledger.account_class.from_dict( - self.ledger, Wallet(), { - "seed": - "carbon smart garage balance margin twelve chest sword toas" - "t envelope bottom stomach absent", - 'address_generator': {'name': 
'single-address'} - } - ) - self.assertEqual( - account.private_key.extended_key_string(), - 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' - '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', - ) - self.assertEqual( - account.public_key.extended_key_string(), - 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' - 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', - ) - address = await account.receiving.ensure_address_gap() - self.assertEqual(address[0], account.public_key.address) - - private_key = await self.ledger.get_private_key_for_address( - account.wallet, address[0] - ) - self.assertEqual( - private_key.extended_key_string(), - 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' - '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', - ) - - invalid_key = await self.ledger.get_private_key_for_address( - account.wallet, 'BcQjRlhDOIrQez1WHfz3whnB33Bp34sUgX' - ) - self.assertIsNone(invalid_key) - - self.assertEqual( - hexlify(private_key.wif()), - b'1c92caa0ef99bfd5e2ceb73b66da8cd726a9370be8c368d448a322f3c5b23aaab901' - ) - - async def test_load_and_save_account(self): - account_data = { - 'name': 'My Account', - 'modified_on': 123.456, - 'seed': - "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac" - "h absent", - 'encrypted': False, - 'private_key': - 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' - '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', - 'public_key': - 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' - 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', - 'address_generator': {'name': 'single-address'} - } - - account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data) - - await account.ensure_address_gap() - - addresses = await account.receiving.get_addresses() - self.assertEqual(len(addresses), 1) - addresses = await account.change.get_addresses() - self.assertEqual(len(addresses), 1) - - self.maxDiff = None - account_data['ledger'] = 'btc_mainnet' - self.assertDictEqual(account_data, account.to_dict()) - - -class AccountEncryptionTests(AsyncioTestCase): - password = "password" - init_vector = b'0000000000000000' - unencrypted_account = { - 'name': 'My Account', - 'seed': - "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac" - "h absent", - 'encrypted': False, - 'private_key': - 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3ZT4vYymkp' - '5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna', - 'public_key': - 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' - 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', - 'address_generator': {'name': 'single-address'} - } - encrypted_account = { - 'name': 'My Account', - 'seed': - "MDAwMDAwMDAwMDAwMDAwMJ4e4W4pE6nQtPiD6MujNIQ7aFPhUBl63GwPziAgGN" - "MBTMoaSjZfyyvw7ELMCqAYTWJ61aV7K4lmd2hR11g9dpdnnpCb9f9j3zLZHRv7+" - "bIkZ//trah9AIkmrc/ZvNkC0Q==", - 'encrypted': True, - 'private_key': - 'MDAwMDAwMDAwMDAwMDAwMLkWikOLScA/ZxlFSGU7dl//7Q/1gS9h7vqQyrd8DX+' - 'jwcp7SwlJ1mkMwuraUaWLq9/LxiaGmqJBUZ50p77YVZbDycaCN1unBr1/i1q6RP' - 'Ob2MNCaG8nyjxZhQai+V/2JmJ+UnFMp3nHany7F8/Hr0g=', - 'public_key': - 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6mKUMJFc7' - 'UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g', - 'address_generator': {'name': 'single-address'} - } - - async def asyncSetUp(self): - self.ledger = ledger_class({ - 'db': ledger_class.database_class(':memory:'), - 'headers': ledger_class.headers_class(':memory:'), - }) - - def 
test_encrypt_wallet(self): - account = self.ledger.account_class.from_dict(self.ledger, Wallet(), self.unencrypted_account) - account.init_vectors = { - 'seed': self.init_vector, - 'private_key': self.init_vector - } - - self.assertFalse(account.encrypted) - self.assertIsNotNone(account.private_key) - account.encrypt(self.password) - self.assertTrue(account.encrypted) - self.assertEqual(account.seed, self.encrypted_account['seed']) - self.assertEqual(account.private_key_string, self.encrypted_account['private_key']) - self.assertIsNone(account.private_key) - - self.assertEqual(account.to_dict()['seed'], self.encrypted_account['seed']) - self.assertEqual(account.to_dict()['private_key'], self.encrypted_account['private_key']) - - account.decrypt(self.password) - self.assertEqual(account.init_vectors['private_key'], self.init_vector) - self.assertEqual(account.init_vectors['seed'], self.init_vector) - - self.assertEqual(account.seed, self.unencrypted_account['seed']) - self.assertEqual(account.private_key.extended_key_string(), self.unencrypted_account['private_key']) - - self.assertEqual(account.to_dict(encrypt_password=self.password)['seed'], self.encrypted_account['seed']) - self.assertEqual(account.to_dict(encrypt_password=self.password)['private_key'], self.encrypted_account['private_key']) - - self.assertFalse(account.encrypted) - - def test_decrypt_wallet(self): - account = self.ledger.account_class.from_dict(self.ledger, Wallet(), self.encrypted_account) - - self.assertTrue(account.encrypted) - account.decrypt(self.password) - self.assertEqual(account.init_vectors['private_key'], self.init_vector) - self.assertEqual(account.init_vectors['seed'], self.init_vector) - - self.assertFalse(account.encrypted) - - self.assertEqual(account.seed, self.unencrypted_account['seed']) - self.assertEqual(account.private_key.extended_key_string(), self.unencrypted_account['private_key']) - - self.assertEqual(account.to_dict(encrypt_password=self.password)['seed'], self.encrypted_account['seed']) - self.assertEqual(account.to_dict(encrypt_password=self.password)['private_key'], self.encrypted_account['private_key']) - self.assertEqual(account.to_dict()['seed'], self.unencrypted_account['seed']) - self.assertEqual(account.to_dict()['private_key'], self.unencrypted_account['private_key']) - - def test_encrypt_decrypt_read_only_account(self): - account_data = self.unencrypted_account.copy() - del account_data['seed'] - del account_data['private_key'] - account = self.ledger.account_class.from_dict(self.ledger, Wallet(), account_data) - encrypted = account.to_dict('password') - self.assertFalse(encrypted['seed']) - self.assertFalse(encrypted['private_key']) - account.encrypt('password') - account.decrypt('password') diff --git a/torba/tests/client_tests/unit/test_headers.py b/torba/tests/client_tests/unit/test_headers.py deleted file mode 100644 index 5f43ca71f..000000000 --- a/torba/tests/client_tests/unit/test_headers.py +++ /dev/null @@ -1,160 +0,0 @@ -import asyncio -import os -import tempfile -from binascii import hexlify - -from torba.client.hash import sha256 -from torba.testcase import AsyncioTestCase - -from torba.coin.bitcoinsegwit import MainHeaders - - -def block_bytes(blocks): - return blocks * MainHeaders.header_size - - -class BitcoinHeadersTestCase(AsyncioTestCase): - HEADER_FILE = 'bitcoin_headers' - RETARGET_BLOCK = 32256 # difficulty: 1 -> 1.18 - - def setUp(self): - self.maxDiff = None - self.header_file_name = os.path.join(os.path.dirname(__file__), self.HEADER_FILE) - - def 
get_bytes(self, upto: int = -1, after: int = 0) -> bytes: - with open(self.header_file_name, 'rb') as headers: - headers.seek(after, os.SEEK_SET) - return headers.read(upto) - - async def get_headers(self, upto: int = -1): - h = MainHeaders(':memory:') - h.io.write(self.get_bytes(upto)) - return h - - -class BasicHeadersTests(BitcoinHeadersTestCase): - - async def test_serialization(self): - h = await self.get_headers() - self.assertDictEqual(h[0], { - 'bits': 486604799, - 'block_height': 0, - 'merkle_root': b'4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b', - 'nonce': 2083236893, - 'prev_block_hash': b'0000000000000000000000000000000000000000000000000000000000000000', - 'timestamp': 1231006505, - 'version': 1 - }) - self.assertDictEqual(h[self.RETARGET_BLOCK-1], { - 'bits': 486604799, - 'block_height': 32255, - 'merkle_root': b'89b4f223789e40b5b475af6483bb05bceda54059e17d2053334b358f6bb310ac', - 'nonce': 312762301, - 'prev_block_hash': b'000000006baebaa74cecde6c6787c26ee0a616a3c333261bff36653babdac149', - 'timestamp': 1262152739, - 'version': 1 - }) - self.assertDictEqual(h[self.RETARGET_BLOCK], { - 'bits': 486594666, - 'block_height': 32256, - 'merkle_root': b'64b5e5f5a262f47af443a0120609206a3305877693edfe03e994f20a024ab627', - 'nonce': 121087187, - 'prev_block_hash': b'00000000984f962134a7291e3693075ae03e521f0ee33378ec30a334d860034b', - 'timestamp': 1262153464, - 'version': 1 - }) - self.assertDictEqual(h[self.RETARGET_BLOCK+1], { - 'bits': 486594666, - 'block_height': 32257, - 'merkle_root': b'4d1488981f08b3037878193297dbac701a2054e0f803d4424fe6a4d763d62334', - 'nonce': 274675219, - 'prev_block_hash': b'000000004f2886a170adb7204cb0c7a824217dd24d11a74423d564c4e0904967', - 'timestamp': 1262154352, - 'version': 1 - }) - self.assertEqual( - h.serialize(h[0]), - h.get_raw_header(0) - ) - self.assertEqual( - h.serialize(h[self.RETARGET_BLOCK]), - h.get_raw_header(self.RETARGET_BLOCK) - ) - - async def test_connect_from_genesis_to_3000_past_first_chunk_at_2016(self): - headers = MainHeaders(':memory:') - self.assertEqual(headers.height, -1) - await headers.connect(0, self.get_bytes(block_bytes(3001))) - self.assertEqual(headers.height, 3000) - - async def test_connect_9_blocks_passing_a_retarget_at_32256(self): - retarget = block_bytes(self.RETARGET_BLOCK-5) - headers = await self.get_headers(upto=retarget) - remainder = self.get_bytes(after=retarget) - self.assertEqual(headers.height, 32250) - await headers.connect(len(headers), remainder) - self.assertEqual(headers.height, 32259) - - async def test_bounds(self): - headers = MainHeaders(':memory:') - await headers.connect(0, self.get_bytes(block_bytes(3001))) - self.assertEqual(headers.height, 3000) - with self.assertRaises(IndexError): - _ = headers[3001] - with self.assertRaises(IndexError): - _ = headers[-1] - self.assertIsNotNone(headers[3000]) - self.assertIsNotNone(headers[0]) - - async def test_repair(self): - headers = MainHeaders(':memory:') - await headers.connect(0, self.get_bytes(block_bytes(3001))) - self.assertEqual(headers.height, 3000) - await headers.repair() - self.assertEqual(headers.height, 3000) - # corrupt the middle of it - headers.io.seek(block_bytes(1500)) - headers.io.write(b"wtf") - await headers.repair() - self.assertEqual(headers.height, 1499) - self.assertEqual(len(headers), 1500) - # corrupt by appending - headers.io.seek(block_bytes(len(headers))) - headers.io.write(b"appending") - await headers.repair() - self.assertEqual(headers.height, 1499) - await headers.connect(len(headers), 
self.get_bytes(block_bytes(3001 - 1500), after=block_bytes(1500))) - self.assertEqual(headers.height, 3000) - - async def test_checkpointed_writer(self): - headers = MainHeaders(':memory:') - headers.checkpoint = 100, hexlify(sha256(self.get_bytes(block_bytes(100)))) - genblocks = lambda start, end: self.get_bytes(block_bytes(end - start), block_bytes(start)) - async with headers.checkpointed_connector() as buff: - buff.write(genblocks(0, 10)) - self.assertEqual(len(headers), 10) - async with headers.checkpointed_connector() as buff: - buff.write(genblocks(10, 100)) - self.assertEqual(len(headers), 100) - headers = MainHeaders(':memory:') - async with headers.checkpointed_connector() as buff: - buff.write(genblocks(0, 300)) - self.assertEqual(len(headers), 300) - - async def test_concurrency(self): - BLOCKS = 30 - headers_temporary_file = tempfile.mktemp() - headers = MainHeaders(headers_temporary_file) - await headers.open() - self.addCleanup(os.remove, headers_temporary_file) - async def writer(): - for block_index in range(BLOCKS): - await headers.connect(block_index, self.get_bytes(block_bytes(block_index + 1), block_bytes(block_index))) - async def reader(): - for block_index in range(BLOCKS): - while len(headers) < block_index: - await asyncio.sleep(0.000001) - assert headers[block_index]['block_height'] == block_index - reader_task = asyncio.create_task(reader()) - await writer() - await reader_task \ No newline at end of file diff --git a/torba/tests/client_tests/unit/test_ledger.py b/torba/tests/client_tests/unit/test_ledger.py deleted file mode 100644 index ecd2377b7..000000000 --- a/torba/tests/client_tests/unit/test_ledger.py +++ /dev/null @@ -1,170 +0,0 @@ -import os -from binascii import hexlify - -from torba.coin.bitcoinsegwit import MainNetLedger -from torba.client.wallet import Wallet - -from client_tests.unit.test_transaction import get_transaction, get_output -from client_tests.unit.test_headers import BitcoinHeadersTestCase, block_bytes - - -class MockNetwork: - - def __init__(self, history, transaction): - self.history = history - self.transaction = transaction - self.address = None - self.get_history_called = [] - self.get_transaction_called = [] - self.is_connected = False - - def retriable_call(self, function, *args, **kwargs): - return function(*args, **kwargs) - - async def get_history(self, address): - self.get_history_called.append(address) - self.address = address - return self.history - - async def get_merkle(self, txid, height): - return {'merkle': ['abcd01'], 'pos': 1} - - async def get_transaction(self, tx_hash, _=None): - self.get_transaction_called.append(tx_hash) - return self.transaction[tx_hash] - - -class LedgerTestCase(BitcoinHeadersTestCase): - - async def asyncSetUp(self): - self.ledger = MainNetLedger({ - 'db': MainNetLedger.database_class(':memory:'), - 'headers': MainNetLedger.headers_class(':memory:') - }) - await self.ledger.db.open() - - async def asyncTearDown(self): - await self.ledger.db.close() - - def make_header(self, **kwargs): - header = { - 'bits': 486604799, - 'block_height': 0, - 'merkle_root': b'4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b', - 'nonce': 2083236893, - 'prev_block_hash': b'0000000000000000000000000000000000000000000000000000000000000000', - 'timestamp': 1231006505, - 'version': 1 - } - header.update(kwargs) - header['merkle_root'] = header['merkle_root'].ljust(64, b'a') - header['prev_block_hash'] = header['prev_block_hash'].ljust(64, b'0') - return self.ledger.headers.serialize(header) - - def 
add_header(self, **kwargs): - serialized = self.make_header(**kwargs) - self.ledger.headers.io.seek(0, os.SEEK_END) - self.ledger.headers.io.write(serialized) - self.ledger.headers._size = None - - -class TestSynchronization(LedgerTestCase): - - async def test_update_history(self): - account = self.ledger.account_class.generate(self.ledger, Wallet(), "torba") - address = await account.receiving.get_or_create_usable_address() - address_details = await self.ledger.db.get_address(address=address) - self.assertIsNone(address_details['history']) - - self.add_header(block_height=0, merkle_root=b'abcd04') - self.add_header(block_height=1, merkle_root=b'abcd04') - self.add_header(block_height=2, merkle_root=b'abcd04') - self.add_header(block_height=3, merkle_root=b'abcd04') - self.ledger.network = MockNetwork([ - {'tx_hash': 'abcd01', 'height': 0}, - {'tx_hash': 'abcd02', 'height': 1}, - {'tx_hash': 'abcd03', 'height': 2}, - ], { - 'abcd01': hexlify(get_transaction(get_output(1)).raw), - 'abcd02': hexlify(get_transaction(get_output(2)).raw), - 'abcd03': hexlify(get_transaction(get_output(3)).raw), - }) - await self.ledger.update_history(address, '') - self.assertListEqual(self.ledger.network.get_history_called, [address]) - self.assertListEqual(self.ledger.network.get_transaction_called, ['abcd01', 'abcd02', 'abcd03']) - - address_details = await self.ledger.db.get_address(address=address) - self.assertEqual( - address_details['history'], - '252bda9b22cc902ca2aa2de3548ee8baf06b8501ff7bfb3b0b7d980dbd1bf792:0:' - 'ab9c0654dd484ac20437030f2034e25dcb29fc507e84b91138f80adc3af738f9:1:' - 'a2ae3d1db3c727e7d696122cab39ee20a7f81856dab7019056dd539f38c548a0:2:' - ) - - self.ledger.network.get_history_called = [] - self.ledger.network.get_transaction_called = [] - await self.ledger.update_history(address, '') - self.assertListEqual(self.ledger.network.get_history_called, [address]) - self.assertListEqual(self.ledger.network.get_transaction_called, []) - - self.ledger.network.history.append({'tx_hash': 'abcd04', 'height': 3}) - self.ledger.network.transaction['abcd04'] = hexlify(get_transaction(get_output(4)).raw) - self.ledger.network.get_history_called = [] - self.ledger.network.get_transaction_called = [] - await self.ledger.update_history(address, '') - self.assertListEqual(self.ledger.network.get_history_called, [address]) - self.assertListEqual(self.ledger.network.get_transaction_called, ['abcd04']) - address_details = await self.ledger.db.get_address(address=address) - self.assertEqual( - address_details['history'], - '252bda9b22cc902ca2aa2de3548ee8baf06b8501ff7bfb3b0b7d980dbd1bf792:0:' - 'ab9c0654dd484ac20437030f2034e25dcb29fc507e84b91138f80adc3af738f9:1:' - 'a2ae3d1db3c727e7d696122cab39ee20a7f81856dab7019056dd539f38c548a0:2:' - '047cf1d53ef68f0fd586d46f90c09ff8e57a4180f67e7f4b8dd0135c3741e828:3:' - ) - - -class MocHeaderNetwork(MockNetwork): - def __init__(self, responses): - super().__init__(None, None) - self.responses = responses - - async def get_headers(self, height, blocks): - return self.responses[height] - - -class BlockchainReorganizationTests(LedgerTestCase): - - async def test_1_block_reorganization(self): - self.ledger.network = MocHeaderNetwork({ - 20: {'height': 20, 'count': 5, 'hex': hexlify( - self.get_bytes(after=block_bytes(20), upto=block_bytes(5)) - )}, - 25: {'height': 25, 'count': 0, 'hex': b''} - }) - headers = self.ledger.headers - await headers.connect(0, self.get_bytes(upto=block_bytes(20))) - self.add_header(block_height=len(headers)) - self.assertEqual(headers.height, 20) - 
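The `receive_header` call that follows hands the ledger a header that does not connect to its current tip, which is what forces the rewind back to the mocked chain. A minimal, self-contained sketch of that behaviour (hypothetical names and data shapes, not torba's actual API):

```python
# Toy model of a 1-block reorg: headers are dicts carrying their own
# hash and the hash of their predecessor.

def apply_header(chain, header):
    """Append header if it connects to our tip; return False otherwise."""
    if not chain or header["prev"] == chain[-1]["hash"]:
        chain.append(header)
        return True
    return False

def handle_reorg(chain, new_headers):
    """Rewind to the fork point, then replay the competing headers."""
    fork = new_headers[0]["prev"]
    while chain and chain[-1]["hash"] != fork:
        chain.pop()  # discard the orphaned tip
    for header in new_headers:
        assert apply_header(chain, header)

chain = [{"hash": "a", "prev": None}, {"hash": "b", "prev": "a"}]
competing = [{"hash": "b2", "prev": "a"}, {"hash": "c2", "prev": "b2"}]
if not apply_header(chain, competing[0]):
    handle_reorg(chain, competing)
assert [h["hash"] for h in chain] == ["a", "b2", "c2"]
```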
await self.ledger.receive_header([{ - 'height': 21, 'hex': hexlify(self.make_header(block_height=21)) - }]) - - async def test_3_block_reorganization(self): - self.ledger.network = MocHeaderNetwork({ - 20: {'height': 20, 'count': 5, 'hex': hexlify( - self.get_bytes(after=block_bytes(20), upto=block_bytes(5)) - )}, - 21: {'height': 21, 'count': 1, 'hex': hexlify(self.make_header(block_height=21))}, - 22: {'height': 22, 'count': 1, 'hex': hexlify(self.make_header(block_height=22))}, - 25: {'height': 25, 'count': 0, 'hex': b''} - }) - headers = self.ledger.headers - await headers.connect(0, self.get_bytes(upto=block_bytes(20))) - self.add_header(block_height=len(headers)) - self.add_header(block_height=len(headers)) - self.add_header(block_height=len(headers)) - self.assertEqual(headers.height, 22) - await self.ledger.receive_header(({ - 'height': 23, 'hex': hexlify(self.make_header(block_height=23)) - },)) diff --git a/torba/tests/client_tests/unit/test_script.py b/torba/tests/client_tests/unit/test_script.py deleted file mode 100644 index ae48179db..000000000 --- a/torba/tests/client_tests/unit/test_script.py +++ /dev/null @@ -1,218 +0,0 @@ -import unittest -from binascii import hexlify, unhexlify - -from torba.client.bcd_data_stream import BCDataStream -from torba.client.basescript import Template, ParseError, tokenize, push_data -from torba.client.basescript import PUSH_SINGLE, PUSH_INTEGER, PUSH_MANY, OP_HASH160, OP_EQUAL -from torba.client.basescript import BaseInputScript, BaseOutputScript - - -def parse(opcodes, source): - template = Template('test', opcodes) - s = BCDataStream() - for t in source: - if isinstance(t, bytes): - s.write_many(push_data(t)) - elif isinstance(t, int): - s.write_uint8(t) - else: - raise ValueError() - s.reset() - return template.parse(tokenize(s)) - - -class TestScriptTemplates(unittest.TestCase): - - def test_push_data(self): - self.assertDictEqual(parse( - (PUSH_SINGLE('script_hash'),), - (b'abcdef',) - ), { - 'script_hash': b'abcdef' - } - ) - self.assertDictEqual(parse( - (PUSH_SINGLE('first'), PUSH_INTEGER('rating')), - (b'Satoshi', (1000).to_bytes(2, 'little')) - ), { - 'first': b'Satoshi', - 'rating': 1000, - } - ) - self.assertDictEqual(parse( - (OP_HASH160, PUSH_SINGLE('script_hash'), OP_EQUAL), - (OP_HASH160, b'abcdef', OP_EQUAL) - ), { - 'script_hash': b'abcdef' - } - ) - - def test_push_data_many(self): - self.assertDictEqual(parse( - (PUSH_MANY('names'),), - (b'amit',) - ), { - 'names': [b'amit'] - } - ) - self.assertDictEqual(parse( - (PUSH_MANY('names'),), - (b'jeremy', b'amit', b'victor') - ), { - 'names': [b'jeremy', b'amit', b'victor'] - } - ) - self.assertDictEqual(parse( - (OP_HASH160, PUSH_MANY('names'), OP_EQUAL), - (OP_HASH160, b'grin', b'jack', OP_EQUAL) - ), { - 'names': [b'grin', b'jack'] - } - ) - - def test_push_data_mixed(self): - self.assertDictEqual(parse( - (PUSH_SINGLE('CEO'), PUSH_MANY('Devs'), PUSH_SINGLE('CTO'), PUSH_SINGLE('State')), - (b'jeremy', b'lex', b'amit', b'victor', b'jack', b'grin', b'NH') - ), { - 'CEO': b'jeremy', - 'CTO': b'grin', - 'Devs': [b'lex', b'amit', b'victor', b'jack'], - 'State': b'NH' - } - ) - - def test_push_data_many_separated(self): - self.assertDictEqual(parse( - (PUSH_MANY('Chiefs'), OP_HASH160, PUSH_MANY('Devs')), - (b'jeremy', b'grin', OP_HASH160, b'lex', b'jack') - ), { - 'Chiefs': [b'jeremy', b'grin'], - 'Devs': [b'lex', b'jack'] - } - ) - - def test_push_data_many_not_separated(self): - with self.assertRaisesRegex(ParseError, 'consecutive PUSH_MANY'): - parse((PUSH_MANY('Chiefs'), 
PUSH_MANY('Devs')), (b'jeremy', b'grin', b'lex', b'jack')) - - -class TestRedeemPubKeyHash(unittest.TestCase): - - def redeem_pubkey_hash(self, sig, pubkey): - # this checks that factory function correctly sets up the script - src1 = BaseInputScript.redeem_pubkey_hash(unhexlify(sig), unhexlify(pubkey)) - self.assertEqual(src1.template.name, 'pubkey_hash') - self.assertEqual(hexlify(src1.values['signature']), sig) - self.assertEqual(hexlify(src1.values['pubkey']), pubkey) - # now we test that it will round trip - src2 = BaseInputScript(src1.source) - self.assertEqual(src2.template.name, 'pubkey_hash') - self.assertEqual(hexlify(src2.values['signature']), sig) - self.assertEqual(hexlify(src2.values['pubkey']), pubkey) - return hexlify(src1.source) - - def test_redeem_pubkey_hash_1(self): - self.assertEqual( - self.redeem_pubkey_hash( - b'30450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e' - b'02dc5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a8301', - b'025415a06514230521bff3aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b' - ), - b'4830450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e02d' - b'c5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a830121025415a06514230521bff3' - b'aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b' - ) - - -class TestRedeemScriptHash(unittest.TestCase): - - def redeem_script_hash(self, sigs, pubkeys): - # this checks that factory function correctly sets up the script - src1 = BaseInputScript.redeem_script_hash( - [unhexlify(sig) for sig in sigs], - [unhexlify(pubkey) for pubkey in pubkeys] - ) - subscript1 = src1.values['script'] - self.assertEqual(src1.template.name, 'script_hash') - self.assertListEqual([hexlify(v) for v in src1.values['signatures']], sigs) - self.assertListEqual([hexlify(p) for p in subscript1.values['pubkeys']], pubkeys) - self.assertEqual(subscript1.values['signatures_count'], len(sigs)) - self.assertEqual(subscript1.values['pubkeys_count'], len(pubkeys)) - # now we test that it will round trip - src2 = BaseInputScript(src1.source) - subscript2 = src2.values['script'] - self.assertEqual(src2.template.name, 'script_hash') - self.assertListEqual([hexlify(v) for v in src2.values['signatures']], sigs) - self.assertListEqual([hexlify(p) for p in subscript2.values['pubkeys']], pubkeys) - self.assertEqual(subscript2.values['signatures_count'], len(sigs)) - self.assertEqual(subscript2.values['pubkeys_count'], len(pubkeys)) - return hexlify(src1.source) - - def test_redeem_script_hash_1(self): - self.assertEqual( - self.redeem_script_hash([ - b'3045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575' - b'e40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401', - b'3044022024890462f731bd1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac68' - b'9e35c4648e6beff1d42490207ba14027a638a62663b2ee40153299141eb01', - b'30450221009910823e0142967a73c2d16c1560054d71c0625a385904ba2f1f53e0bc1daa8d02205cd' - b'70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc318777a01' - ], [ - b'0372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a4', - b'03061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb7692', - b'02463bfbc1eaec74b5c21c09239ae18dbf6fc07833917df10d0b43e322810cee0c', - b'02fa6a6455c26fb516cfa85ea8de81dd623a893ffd579ee2a00deb6cdf3633d6bb', - b'0382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171ad0abeaa89' - ]), - b'00483045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575e' - 
b'40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401473044022024890462f731bd' - b'1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac689e35c4648e6beff1d42490207ba' - b'14027a638a62663b2ee40153299141eb014830450221009910823e0142967a73c2d16c1560054d71c0625a' - b'385904ba2f1f53e0bc1daa8d02205cd70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc3' - b'18777a014cad53210372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a42103' - b'061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb76922102463bfbc1eaec74b5c2' - b'1c09239ae18dbf6fc07833917df10d0b43e322810cee0c2102fa6a6455c26fb516cfa85ea8de81dd623a89' - b'3ffd579ee2a00deb6cdf3633d6bb210382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171' - b'ad0abeaa8955ae' - ) - - -class TestPayPubKeyHash(unittest.TestCase): - - def pay_pubkey_hash(self, pubkey_hash): - # this checks that factory function correctly sets up the script - src1 = BaseOutputScript.pay_pubkey_hash(unhexlify(pubkey_hash)) - self.assertEqual(src1.template.name, 'pay_pubkey_hash') - self.assertEqual(hexlify(src1.values['pubkey_hash']), pubkey_hash) - # now we test that it will round trip - src2 = BaseOutputScript(src1.source) - self.assertEqual(src2.template.name, 'pay_pubkey_hash') - self.assertEqual(hexlify(src2.values['pubkey_hash']), pubkey_hash) - return hexlify(src1.source) - - def test_pay_pubkey_hash_1(self): - self.assertEqual( - self.pay_pubkey_hash(b'64d74d12acc93ba1ad495e8d2d0523252d664f4d'), - b'76a91464d74d12acc93ba1ad495e8d2d0523252d664f4d88ac' - ) - - -class TestPayScriptHash(unittest.TestCase): - - def pay_script_hash(self, script_hash): - # this checks that factory function correctly sets up the script - src1 = BaseOutputScript.pay_script_hash(unhexlify(script_hash)) - self.assertEqual(src1.template.name, 'pay_script_hash') - self.assertEqual(hexlify(src1.values['script_hash']), script_hash) - # now we test that it will round trip - src2 = BaseOutputScript(src1.source) - self.assertEqual(src2.template.name, 'pay_script_hash') - self.assertEqual(hexlify(src2.values['script_hash']), script_hash) - return hexlify(src1.source) - - def test_pay_pubkey_hash_1(self): - self.assertEqual( - self.pay_script_hash(b'63d65a2ee8c44426d06050cfd71c0f0ff3fc41ac'), - b'a91463d65a2ee8c44426d06050cfd71c0f0ff3fc41ac87' - ) diff --git a/torba/tests/client_tests/unit/test_transaction.py b/torba/tests/client_tests/unit/test_transaction.py deleted file mode 100644 index 12a0aca3f..000000000 --- a/torba/tests/client_tests/unit/test_transaction.py +++ /dev/null @@ -1,345 +0,0 @@ -import unittest -from binascii import hexlify, unhexlify -from itertools import cycle - -from torba.testcase import AsyncioTestCase - -from torba.coin.bitcoinsegwit import MainNetLedger as ledger_class -from torba.client.wallet import Wallet -from torba.client.constants import CENT, COIN - - -NULL_HASH = b'\x00'*32 -FEE_PER_BYTE = 50 -FEE_PER_CHAR = 200000 - - -def get_output(amount=CENT, pubkey_hash=NULL_HASH, height=-2): - return ledger_class.transaction_class(height=height) \ - .add_outputs([ledger_class.transaction_class.output_class.pay_pubkey_hash(amount, pubkey_hash)]) \ - .outputs[0] - - -def get_input(amount=CENT, pubkey_hash=NULL_HASH): - return ledger_class.transaction_class.input_class.spend(get_output(amount, pubkey_hash)) - - -def get_transaction(txo=None): - return ledger_class.transaction_class() \ - .add_inputs([get_input()]) \ - .add_outputs([txo or ledger_class.transaction_class.output_class.pay_pubkey_hash(CENT, NULL_HASH)]) - - -class 
TestSizeAndFeeEstimation(AsyncioTestCase): - - async def asyncSetUp(self): - self.ledger = ledger_class({ - 'db': ledger_class.database_class(':memory:'), - 'headers': ledger_class.headers_class(':memory:'), - }) - - def test_output_size_and_fee(self): - txo = get_output() - self.assertEqual(txo.size, 46) - self.assertEqual(txo.get_fee(self.ledger), 46 * FEE_PER_BYTE) - - def test_input_size_and_fee(self): - txi = get_input() - self.assertEqual(txi.size, 148) - self.assertEqual(txi.get_fee(self.ledger), 148 * FEE_PER_BYTE) - - def test_transaction_size_and_fee(self): - tx = get_transaction() - self.assertEqual(tx.size, 204) - self.assertEqual(tx.base_size, tx.size - tx.inputs[0].size - tx.outputs[0].size) - self.assertEqual(tx.get_base_fee(self.ledger), FEE_PER_BYTE * tx.base_size) - - -class TestAccountBalanceImpactFromTransaction(unittest.TestCase): - - def test_is_my_account_not_set(self): - tx = get_transaction() - with self.assertRaisesRegex(ValueError, "Cannot access net_account_balance"): - _ = tx.net_account_balance - tx.inputs[0].txo_ref.txo.is_my_account = True - with self.assertRaisesRegex(ValueError, "Cannot access net_account_balance"): - _ = tx.net_account_balance - tx.outputs[0].is_my_account = True - # all inputs/outputs are set now so it should work - _ = tx.net_account_balance - - def test_paying_from_my_account_to_other_account(self): - tx = ledger_class.transaction_class() \ - .add_inputs([get_input(300*CENT)]) \ - .add_outputs([get_output(190*CENT, NULL_HASH), - get_output(100*CENT, NULL_HASH)]) - tx.inputs[0].txo_ref.txo.is_my_account = True - tx.outputs[0].is_my_account = False - tx.outputs[1].is_my_account = True - self.assertEqual(tx.net_account_balance, -200*CENT) - - def test_paying_from_other_account_to_my_account(self): - tx = ledger_class.transaction_class() \ - .add_inputs([get_input(300*CENT)]) \ - .add_outputs([get_output(190*CENT, NULL_HASH), - get_output(100*CENT, NULL_HASH)]) - tx.inputs[0].txo_ref.txo.is_my_account = False - tx.outputs[0].is_my_account = True - tx.outputs[1].is_my_account = False - self.assertEqual(tx.net_account_balance, 190*CENT) - - def test_paying_from_my_account_to_my_account(self): - tx = ledger_class.transaction_class() \ - .add_inputs([get_input(300*CENT)]) \ - .add_outputs([get_output(190*CENT, NULL_HASH), - get_output(100*CENT, NULL_HASH)]) - tx.inputs[0].txo_ref.txo.is_my_account = True - tx.outputs[0].is_my_account = True - tx.outputs[1].is_my_account = True - self.assertEqual(tx.net_account_balance, -10*CENT) # lost to fee - - -class TestTransactionSerialization(unittest.TestCase): - - def test_genesis_transaction(self): - raw = unhexlify( - '01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04' - 'ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e20' - '6272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01' - '000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4c' - 'ef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000' - ) - tx = ledger_class.transaction_class(raw) - self.assertEqual(tx.version, 1) - self.assertEqual(tx.locktime, 0) - self.assertEqual(len(tx.inputs), 1) - self.assertEqual(len(tx.outputs), 1) - - coinbase = tx.inputs[0] - self.assertTrue(coinbase.txo_ref.is_null, NULL_HASH) - self.assertEqual(coinbase.txo_ref.position, 0xFFFFFFFF) - self.assertEqual(coinbase.sequence, 4294967295) - self.assertIsNotNone(coinbase.coinbase) - self.assertIsNone(coinbase.script) - 
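The assertion that follows strips eight bytes off the front of the coinbase script before comparing it to the famous headline. Those eight bytes are the two small pushes that precede the message plus the message's own length byte; a standalone decode (stdlib only, using the well-known genesis scriptSig) shows the arithmetic:

```python
# Decode the genesis coinbase scriptSig: opcodes 0x01-0x4b push that
# many raw bytes. Prefix: push 4 bytes (nBits 0x1d00ffff, little
# endian) + push 1 byte (0x04) = 5 + 2 bytes, then the 0x45 length
# byte of the 69-byte message -- 8 bytes in total before the text.
from binascii import unhexlify

message = b'The Times 03/Jan/2009 Chancellor on brink of second bailout for banks'
script = unhexlify('04ffff001d' + '0104' + '45' + message.hex())

pushes, i = [], 0
while i < len(script):
    n = script[i]
    pushes.append(script[i + 1:i + 1 + n])
    i += 1 + n

assert pushes == [bytes.fromhex('ffff001d'), b'\x04', message]
assert script[8:] == message  # hence coinbase.coinbase[8:]
```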
self.assertEqual( - coinbase.coinbase[8:], - b'The Times 03/Jan/2009 Chancellor on brink of second bailout for banks' - ) - - out = tx.outputs[0] - self.assertEqual(out.amount, 5000000000) - self.assertEqual(out.position, 0) - self.assertTrue(out.script.is_pay_pubkey) - self.assertFalse(out.script.is_pay_pubkey_hash) - self.assertFalse(out.script.is_pay_script_hash) - - tx._reset() - self.assertEqual(tx.raw, raw) - - def test_coinbase_transaction(self): - raw = unhexlify( - '01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4e03' - '1f5a070473319e592f4254432e434f4d2f4e59412ffabe6d6dcceb2a9d0444c51cabc4ee97a1a000036ca0' - 'cb48d25b94b78c8367d8b868454b0100000000000000c0309b21000008c5f8f80000ffffffff0291920b5d' - '0000000017a914e083685a1097ce1ea9e91987ab9e94eae33d8a13870000000000000000266a24aa21a9ed' - 'e6c99265a6b9e1d36c962fda0516b35709c49dc3b8176fa7e5d5f1f6197884b400000000' - ) - tx = ledger_class.transaction_class(raw) - self.assertEqual(tx.version, 1) - self.assertEqual(tx.locktime, 0) - self.assertEqual(len(tx.inputs), 1) - self.assertEqual(len(tx.outputs), 2) - - coinbase = tx.inputs[0] - self.assertTrue(coinbase.txo_ref.is_null) - self.assertEqual(coinbase.txo_ref.position, 0xFFFFFFFF) - self.assertEqual(coinbase.sequence, 4294967295) - self.assertIsNotNone(coinbase.coinbase) - self.assertIsNone(coinbase.script) - self.assertEqual(coinbase.coinbase[9:22], b'/BTC.COM/NYA/') - - out = tx.outputs[0] - self.assertEqual(out.amount, 1561039505) - self.assertEqual(out.position, 0) - self.assertFalse(out.script.is_pay_pubkey) - self.assertFalse(out.script.is_pay_pubkey_hash) - self.assertTrue(out.script.is_pay_script_hash) - self.assertFalse(out.script.is_return_data) - - out1 = tx.outputs[1] - self.assertEqual(out1.amount, 0) - self.assertEqual(out1.position, 1) - self.assertEqual( - hexlify(out1.script.values['data']), - b'aa21a9ede6c99265a6b9e1d36c962fda0516b35709c49dc3b8176fa7e5d5f1f6197884b4' - ) - self.assertTrue(out1.script.is_return_data) - self.assertFalse(out1.script.is_pay_pubkey) - self.assertFalse(out1.script.is_pay_pubkey_hash) - self.assertFalse(out1.script.is_pay_script_hash) - - tx._reset() - self.assertEqual(tx.raw, raw) - - -class TestTransactionSigning(AsyncioTestCase): - - async def asyncSetUp(self): - self.ledger = ledger_class({ - 'db': ledger_class.database_class(':memory:'), - 'headers': ledger_class.headers_class(':memory:'), - }) - await self.ledger.db.open() - - async def asyncTearDown(self): - await self.ledger.db.close() - - async def test_sign(self): - account = self.ledger.account_class.from_dict( - self.ledger, Wallet(), { - "seed": "carbon smart garage balance margin twelve chest sword " - "toast envelope bottom stomach absent" - - } - ) - - await account.ensure_address_gap() - address1, address2 = await account.receiving.get_addresses(limit=2) - pubkey_hash1 = self.ledger.address_to_hash160(address1) - pubkey_hash2 = self.ledger.address_to_hash160(address2) - - tx_class = ledger_class.transaction_class - - tx = tx_class() \ - .add_inputs([tx_class.input_class.spend(get_output(2*COIN, pubkey_hash1))]) \ - .add_outputs([tx_class.output_class.pay_pubkey_hash(int(1.9*COIN), pubkey_hash2)]) \ - - await tx.sign([account]) - - self.assertEqual( - hexlify(tx.inputs[0].script.values['signature']), - b'304402205a1df8cd5d2d2fa5934b756883d6c07e4f83e1350c740992d47a12422' - b'226aaa202200098ac8675827aea2b0d6f0e49566143a95d523e311d342172cd99e2021e47cb01' - ) - - -class TransactionIOBalancing(AsyncioTestCase): - - async def asyncSetUp(self): - 
self.ledger = ledger_class({ - 'db': ledger_class.database_class(':memory:'), - 'headers': ledger_class.headers_class(':memory:'), - }) - await self.ledger.db.open() - self.account = self.ledger.account_class.from_dict( - self.ledger, Wallet(), { - "seed": "carbon smart garage balance margin twelve chest sword " - "toast envelope bottom stomach absent" - } - ) - - addresses = await self.account.ensure_address_gap() - self.pubkey_hash = [self.ledger.address_to_hash160(a) for a in addresses] - self.hash_cycler = cycle(self.pubkey_hash) - - async def asyncTearDown(self): - await self.ledger.db.close() - - def txo(self, amount, address=None): - return get_output(int(amount*COIN), address or next(self.hash_cycler)) - - def txi(self, txo): - return ledger_class.transaction_class.input_class.spend(txo) - - def tx(self, inputs, outputs): - return ledger_class.transaction_class.create(inputs, outputs, [self.account], self.account) - - async def create_utxos(self, amounts): - utxos = [self.txo(amount) for amount in amounts] - - self.funding_tx = ledger_class.transaction_class(is_verified=True) \ - .add_inputs([self.txi(self.txo(sum(amounts)+0.1))]) \ - .add_outputs(utxos) - - await self.ledger.db.insert_transaction(self.funding_tx) - - for utxo in utxos: - await self.ledger.db.save_transaction_io( - self.funding_tx, - self.ledger.hash160_to_address(utxo.script.values['pubkey_hash']), - utxo.script.values['pubkey_hash'], '' - ) - - return utxos - - @staticmethod - def inputs(tx): - return [round(i.amount/COIN, 2) for i in tx.inputs] - - @staticmethod - def outputs(tx): - return [round(o.amount/COIN, 2) for o in tx.outputs] - - async def test_basic_use_cases(self): - self.ledger.fee_per_byte = int(.01*CENT) - - # available UTXOs for filling missing inputs - utxos = await self.create_utxos([ - 1, 1, 3, 5, 10 - ]) - - # pay 3 coins (3.02 w/ fees) - tx = await self.tx( - [], # inputs - [self.txo(3)] # outputs - ) - # best UTXO match is 5 (as UTXO 3 will be short 0.02 to cover fees) - self.assertListEqual(self.inputs(tx), [5]) - # a change of 1.98 is added to reach balance - self.assertListEqual(self.outputs(tx), [3, 1.98]) - - await self.ledger.release_outputs(utxos) - - # pay 2.98 coins (3.00 w/ fees) - tx = await self.tx( - [], # inputs - [self.txo(2.98)] # outputs - ) - # best UTXO match is 3 and no change is needed - self.assertListEqual(self.inputs(tx), [3]) - self.assertListEqual(self.outputs(tx), [2.98]) - - await self.ledger.release_outputs(utxos) - - # supplied input and output, but input is not enough to cover output - tx = await self.tx( - [self.txi(self.txo(10))], # inputs - [self.txo(11)] # outputs - ) - # additional input is chosen (UTXO 3) - self.assertListEqual([10, 3], self.inputs(tx)) - # change is now needed to consume extra input - self.assertListEqual([11, 1.96], self.outputs(tx)) - - await self.ledger.release_outputs(utxos) - - # liquidating a UTXO - tx = await self.tx( - [self.txi(self.txo(10))], # inputs - [] # outputs - ) - self.assertListEqual([10], self.inputs(tx)) - # missing change added to consume the amount - self.assertListEqual([9.98], self.outputs(tx)) - - await self.ledger.release_outputs(utxos) - - # liquidating at a loss, requires adding extra inputs - tx = await self.tx( - [self.txi(self.txo(0.01))], # inputs - [] # outputs - ) - # UTXO 1 is added to cover some of the fee - self.assertListEqual([0.01, 1], self.inputs(tx)) - # change is now needed to consume extra input - self.assertListEqual([0.97], self.outputs(tx))
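The fee amounts asserted in test_basic_use_cases follow directly from the byte sizes checked in TestSizeAndFeeEstimation (148-byte input, 46-byte output, 204-byte one-in/one-out transaction, hence a 10-byte base) and the `fee_per_byte = int(.01*CENT)` set at the top of the test. A re-derivation of the "3.02" and "1.98" figures, using only those constants:

```python
# Worked fee arithmetic for test_basic_use_cases; the size constants
# come from the assertions in TestSizeAndFeeEstimation above.
COIN = 100_000_000
CENT = COIN // 100
FEE_PER_BYTE = int(.01 * CENT)      # 10,000, as set in the test

BASE, INPUT, OUTPUT = 10, 148, 46   # tx base, per-input, per-output bytes

def fee(n_in, n_out):
    return (BASE + n_in * INPUT + n_out * OUTPUT) * FEE_PER_BYTE

print(fee(1, 1) / COIN)  # 0.0204: paying 3 costs ~3.02, so UTXO 3 is short
print(fee(1, 2) / COIN)  # 0.025: with change, 5 - 3 - 0.025 = 1.975,
                         # shown as 1.98 because outputs() rounds to 2 places
```

diff --git a/torba/torba.png b/torba/torba.png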
deleted file mode 100644 index 2e06e8e16..000000000 Binary files a/torba/torba.png and /dev/null differ diff --git a/torba/torba/__init__.py b/torba/torba/__init__.py deleted file mode 100644 index 4b90c3d19..000000000 --- a/torba/torba/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -__path__: str = __import__('pkgutil').extend_path(__path__, __name__) -__version__ = '0.5.7' diff --git a/torba/torba/client/__init__.py b/torba/torba/client/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/torba/torba/client/words/__init__.py b/torba/torba/client/words/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/torba/torba/server/__init__.py b/torba/torba/server/__init__.py deleted file mode 100644 index b7f2cf595..000000000 --- a/torba/torba/server/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .server import Server diff --git a/torba/torba/server/block_processor.py b/torba/torba/server/block_processor.py deleted file mode 100644 index 0cc365d06..000000000 --- a/torba/torba/server/block_processor.py +++ /dev/null @@ -1,714 +0,0 @@ -# Copyright (c) 2016-2017, Neil Booth -# Copyright (c) 2017, the ElectrumX authors -# -# All rights reserved. -# -# See the file "LICENCE" for information about the copyright -# and warranty status of this software. - -"""Block prefetcher and chain processor.""" - - -import asyncio -from struct import pack, unpack -import time - -import torba -from torba.server.daemon import DaemonError -from torba.server.hash import hash_to_hex_str, HASHX_LEN -from torba.server.util import chunks, class_logger -from torba.server.db import FlushData - - -class Prefetcher: - """Prefetches blocks (in the forward direction only).""" - - def __init__(self, daemon, coin, blocks_event): - self.logger = class_logger(__name__, self.__class__.__name__) - self.daemon = daemon - self.coin = coin - self.blocks_event = blocks_event - self.blocks = [] - self.caught_up = False - # Access to fetched_height should be protected by the semaphore - self.fetched_height = None - self.semaphore = asyncio.Semaphore() - self.refill_event = asyncio.Event() - # The prefetched block cache size. The min cache size has - # little effect on sync time. - self.cache_size = 0 - self.min_cache_size = 10 * 1024 * 1024 - # This makes the first fetch be 10 blocks - self.ave_size = self.min_cache_size // 10 - self.polling_delay = 5 - - async def main_loop(self, bp_height): - """Loop forever polling for more blocks.""" - await self.reset_height(bp_height) - while True: - try: - # Sleep a while if there is nothing to prefetch - await self.refill_event.wait() - if not await self._prefetch_blocks(): - await asyncio.sleep(self.polling_delay) - except DaemonError as e: - self.logger.info(f'ignoring daemon error: {e}') - - def get_prefetched_blocks(self): - """Called by block processor when it is processing queued blocks.""" - blocks = self.blocks - self.blocks = [] - self.cache_size = 0 - self.refill_event.set() - return blocks - - async def reset_height(self, height): - """Reset to prefetch blocks from the block processor's height. - - Used in blockchain reorganisations. This coroutine can be - called asynchronously to the _prefetch_blocks coroutine so we - must synchronize with a semaphore. 
- """ - async with self.semaphore: - self.blocks.clear() - self.cache_size = 0 - self.fetched_height = height - self.refill_event.set() - - daemon_height = await self.daemon.height() - behind = daemon_height - height - if behind > 0: - self.logger.info(f'catching up to daemon height {daemon_height:,d} ' - f'({behind:,d} blocks behind)') - else: - self.logger.info(f'caught up to daemon height {daemon_height:,d}') - - async def _prefetch_blocks(self): - """Prefetch some blocks and put them on the queue. - - Repeats until the queue is full or caught up. - """ - daemon = self.daemon - daemon_height = await daemon.height() - async with self.semaphore: - while self.cache_size < self.min_cache_size: - # Try and catch up all blocks but limit to room in cache. - # Constrain fetch count to between 0 and 500 regardless; - # testnet can be lumpy. - cache_room = self.min_cache_size // self.ave_size - count = min(daemon_height - self.fetched_height, cache_room) - count = min(500, max(count, 0)) - if not count: - self.caught_up = True - return False - - first = self.fetched_height + 1 - hex_hashes = await daemon.block_hex_hashes(first, count) - if self.caught_up: - self.logger.info('new block height {:,d} hash {}' - .format(first + count-1, hex_hashes[-1])) - blocks = await daemon.raw_blocks(hex_hashes) - - assert count == len(blocks) - - # Special handling for genesis block - if first == 0: - blocks[0] = self.coin.genesis_block(blocks[0]) - self.logger.info(f'verified genesis block with hash {hex_hashes[0]}') - - # Update our recent average block size estimate - size = sum(len(block) for block in blocks) - if count >= 10: - self.ave_size = size // count - else: - self.ave_size = (size + (10 - count) * self.ave_size) // 10 - - self.blocks.extend(blocks) - self.cache_size += size - self.fetched_height += count - self.blocks_event.set() - - self.refill_event.clear() - return True - - -class ChainError(Exception): - """Raised on error processing blocks.""" - - -class BlockProcessor: - """Process blocks and update the DB state to match. - - Employ a prefetcher to prefetch blocks in batches for processing. - Coordinate backing up in case of chain reorganisations. - """ - - def __init__(self, env, db, daemon, notifications): - self.env = env - self.db = db - self.daemon = daemon - self.notifications = notifications - - self.coin = env.coin - self.blocks_event = asyncio.Event() - self.prefetcher = Prefetcher(daemon, env.coin, self.blocks_event) - self.logger = class_logger(__name__, self.__class__.__name__) - - # Meta - self.next_cache_check = 0 - self.touched = set() - self.reorg_count = 0 - - # Caches of unflushed items. - self.headers = [] - self.tx_hashes = [] - self.undo_infos = [] - - # UTXO cache - self.utxo_cache = {} - self.db_deletes = [] - - # If the lock is successfully acquired, in-memory chain state - # is consistent with self.height - self.state_lock = asyncio.Lock() - - async def run_in_thread_with_lock(self, func, *args): - # Run in a thread to prevent blocking. Shielded so that - # cancellations from shutdown don't lose work - when the task - # completes the data will be flushed and then we shut down. - # Take the state lock to be certain in-memory state is - # consistent and not being updated elsewhere. 
- async def run_in_thread_locked(): - async with self.state_lock: - return await asyncio.get_event_loop().run_in_executor(None, func, *args) - return await asyncio.shield(run_in_thread_locked()) - - async def check_and_advance_blocks(self, raw_blocks): - """Process the list of raw blocks passed. Detects and handles - reorgs. - """ - if not raw_blocks: - return - first = self.height + 1 - blocks = [self.coin.block(raw_block, first + n) - for n, raw_block in enumerate(raw_blocks)] - headers = [block.header for block in blocks] - hprevs = [self.coin.header_prevhash(h) for h in headers] - chain = [self.tip] + [self.coin.header_hash(h) for h in headers[:-1]] - - if hprevs == chain: - start = time.time() - await self.run_in_thread_with_lock(self.advance_blocks, blocks) - await self._maybe_flush() - if not self.db.first_sync: - s = '' if len(blocks) == 1 else 's' - self.logger.info('processed {:,d} block{} in {:.1f}s' - .format(len(blocks), s, - time.time() - start)) - if self._caught_up_event.is_set(): - await self.notifications.on_block(self.touched, self.height) - self.touched = set() - elif hprevs[0] != chain[0]: - await self.reorg_chain() - else: - # It is probably possible but extremely rare that what - # bitcoind returns doesn't form a chain because it - # reorg-ed the chain as it was processing the batched - # block hash requests. Should this happen it's simplest - # just to reset the prefetcher and try again. - self.logger.warning('daemon blocks do not form a chain; ' - 'resetting the prefetcher') - await self.prefetcher.reset_height(self.height) - - async def reorg_chain(self, count=None): - """Handle a chain reorganisation. - - Count is the number of blocks to simulate a reorg, or None for - a real reorg.""" - if count is None: - self.logger.info('chain reorg detected') - else: - self.logger.info(f'faking a reorg of {count:,d} blocks') - await self.flush(True) - - async def get_raw_blocks(last_height, hex_hashes): - heights = range(last_height, last_height - len(hex_hashes), -1) - try: - blocks = [self.db.read_raw_block(height) for height in heights] - self.logger.info(f'read {len(blocks)} blocks from disk') - return blocks - except FileNotFoundError: - return await self.daemon.raw_blocks(hex_hashes) - - def flush_backup(): - # self.touched can include other addresses which is - # harmless, but remove None. - self.touched.discard(None) - self.db.flush_backup(self.flush_data(), self.touched) - - start, last, hashes = await self.reorg_hashes(count) - # Reverse and convert to hex strings. - hashes = [hash_to_hex_str(hash) for hash in reversed(hashes)] - for hex_hashes in chunks(hashes, 50): - raw_blocks = await get_raw_blocks(last, hex_hashes) - await self.run_in_thread_with_lock(self.backup_blocks, raw_blocks) - await self.run_in_thread_with_lock(flush_backup) - last -= len(raw_blocks) - await self.prefetcher.reset_height(self.height) - - async def reorg_hashes(self, count): - """Return a pair (start, last, hashes) of blocks to back up during a - reorg. - - The hashes are returned in order of increasing height. Start - is the height of the first hash, last of the last. 
- """ - start, count = await self.calc_reorg_range(count) - last = start + count - 1 - s = '' if count == 1 else 's' - self.logger.info(f'chain was reorganised replacing {count:,d} ' - f'block{s} at heights {start:,d}-{last:,d}') - - return start, last, await self.db.fs_block_hashes(start, count) - - async def calc_reorg_range(self, count): - """Calculate the reorg range""" - - def diff_pos(hashes1, hashes2): - """Returns the index of the first difference in the hash lists. - If both lists match returns their length.""" - for n, (hash1, hash2) in enumerate(zip(hashes1, hashes2)): - if hash1 != hash2: - return n - return len(hashes) - - if count is None: - # A real reorg - start = self.height - 1 - count = 1 - while start > 0: - hashes = await self.db.fs_block_hashes(start, count) - hex_hashes = [hash_to_hex_str(hash) for hash in hashes] - d_hex_hashes = await self.daemon.block_hex_hashes(start, count) - n = diff_pos(hex_hashes, d_hex_hashes) - if n > 0: - start += n - break - count = min(count * 2, start) - start -= count - - count = (self.height - start) + 1 - else: - start = (self.height - count) + 1 - - return start, count - - def estimate_txs_remaining(self): - # Try to estimate how many txs there are to go - daemon_height = self.daemon.cached_height() - coin = self.coin - tail_count = daemon_height - max(self.height, coin.TX_COUNT_HEIGHT) - # Damp the initial enthusiasm - realism = max(2.0 - 0.9 * self.height / coin.TX_COUNT_HEIGHT, 1.0) - return (tail_count * coin.TX_PER_BLOCK + - max(coin.TX_COUNT - self.tx_count, 0)) * realism - - # - Flushing - def flush_data(self): - """The data for a flush. The lock must be taken.""" - assert self.state_lock.locked() - return FlushData(self.height, self.tx_count, self.headers, - self.tx_hashes, self.undo_infos, self.utxo_cache, - self.db_deletes, self.tip) - - async def flush(self, flush_utxos): - def flush(): - self.db.flush_dbs(self.flush_data(), flush_utxos, - self.estimate_txs_remaining) - await self.run_in_thread_with_lock(flush) - - async def _maybe_flush(self): - # If caught up, flush everything as client queries are - # performed on the DB. - if self._caught_up_event.is_set(): - await self.flush(True) - elif time.time() > self.next_cache_check: - flush_arg = self.check_cache_size() - if flush_arg is not None: - await self.flush(flush_arg) - self.next_cache_check = time.time() + 30 - - def check_cache_size(self): - """Flush a cache if it gets too big.""" - # Good average estimates based on traversal of subobjects and - # requesting size from Python (see deep_getsizeof). - one_MB = 1000*1000 - utxo_cache_size = len(self.utxo_cache) * 205 - db_deletes_size = len(self.db_deletes) * 57 - hist_cache_size = self.db.history.unflushed_memsize() - # Roughly ntxs * 32 + nblocks * 42 - tx_hash_size = ((self.tx_count - self.db.fs_tx_count) * 32 - + (self.height - self.db.fs_height) * 42) - utxo_MB = (db_deletes_size + utxo_cache_size) // one_MB - hist_MB = (hist_cache_size + tx_hash_size) // one_MB - - self.logger.info('our height: {:,d} daemon: {:,d} ' - 'UTXOs {:,d}MB hist {:,d}MB' - .format(self.height, self.daemon.cached_height(), - utxo_MB, hist_MB)) - - # Flush history if it takes up over 20% of cache memory. - # Flush UTXOs once they take up 80% of cache memory. - cache_MB = self.env.cache_MB - if utxo_MB + hist_MB >= cache_MB or hist_MB >= cache_MB // 5: - return utxo_MB >= cache_MB * 4 // 5 - return None - - def advance_blocks(self, blocks): - """Synchronously advance the blocks. 
- - It is already verified they correctly connect onto our tip. - """ - min_height = self.db.min_undo_height(self.daemon.cached_height()) - height = self.height - - for block in blocks: - height += 1 - undo_info = self.advance_txs( - height, block.transactions, self.coin.electrum_header(block.header, height) - ) - if height >= min_height: - self.undo_infos.append((undo_info, height)) - self.db.write_raw_block(block.raw, height) - - headers = [block.header for block in blocks] - self.height = height - self.headers.extend(headers) - self.tip = self.coin.header_hash(headers[-1]) - - def advance_txs(self, height, txs, header): - self.tx_hashes.append(b''.join(tx_hash for tx, tx_hash in txs)) - - # Use local vars for speed in the loops - undo_info = [] - tx_num = self.tx_count - script_hashX = self.coin.hashX_from_script - s_pack = pack - put_utxo = self.utxo_cache.__setitem__ - spend_utxo = self.spend_utxo - undo_info_append = undo_info.append - update_touched = self.touched.update - hashXs_by_tx = [] - append_hashXs = hashXs_by_tx.append - - for tx, tx_hash in txs: - hashXs = [] - append_hashX = hashXs.append - tx_numb = s_pack('<I', tx_num) - - # Spend the inputs - for txin in tx.inputs: - if txin.is_generation(): - continue - cache_value = spend_utxo(txin.prev_hash, txin.prev_idx) - undo_info_append(cache_value) - append_hashX(cache_value[:-12]) - - # Add the new UTXOs - for idx, txout in enumerate(tx.outputs): - # Get the hashX. Ignore unspendable outputs. - hashX = script_hashX(txout.pk_script) - if hashX: - append_hashX(hashX) - put_utxo(tx_hash + s_pack('<H', idx), hashX + tx_numb + s_pack('<Q', txout.value)) - - append_hashXs(hashXs) - update_touched(hashXs) - tx_num += 1 - - self.db.history.add_unflushed(hashXs_by_tx, self.tx_count) - self.tx_count = tx_num - self.db.tx_counts.append(tx_num) - - return undo_info - - def backup_blocks(self, raw_blocks): - """Backup the raw blocks and flush. - - The blocks should be in order of decreasing height, starting at - self.height. A flush is performed once the blocks are backed up. - """ - self.db.assert_flushed(self.flush_data()) - assert self.height >= len(raw_blocks) - - coin = self.coin - for raw_block in raw_blocks: - # Check and update self.tip - block = coin.block(raw_block, self.height) - header_hash = coin.header_hash(block.header) - if header_hash != self.tip: - raise ChainError('backup block {} not tip {} at height {:,d}' - .format(hash_to_hex_str(header_hash), - hash_to_hex_str(self.tip), - self.height)) - self.tip = coin.header_prevhash(block.header) - self.backup_txs(block.transactions) - self.height -= 1 - self.db.tx_counts.pop() - - self.logger.info(f'backed up to height {self.height:,d}') - - def backup_txs(self, txs): - # Prevout values, in order down the block (coinbase first if present) - # undo_info is in reverse block order - undo_info = self.db.read_undo_info(self.height) - if undo_info is None: - raise ChainError(f'no undo information found for height {self.height:,d}') - n = len(undo_info) - - # Use local vars for speed in the loops - s_pack = pack - put_utxo = self.utxo_cache.__setitem__ - spend_utxo = self.spend_utxo - script_hashX = self.coin.hashX_from_script - touched = self.touched - undo_entry_len = 12 + HASHX_LEN - - for tx, tx_hash in reversed(txs): - for idx, txout in enumerate(tx.outputs): - # Spend the TX outputs. Be careful with unspendable - # outputs - we didn't save those in the first place. - hashX = script_hashX(txout.pk_script) - if hashX: - cache_value = spend_utxo(tx_hash, idx) - touched.add(cache_value[:-12]) - - # Restore the inputs - for txin in reversed(tx.inputs): - if txin.is_generation(): - continue - n -= undo_entry_len - undo_item = undo_info[n:n + undo_entry_len] - put_utxo(txin.prev_hash + s_pack('<H', txin.prev_idx), undo_item) - touched.add(undo_item[:-12]) - - assert n == 0 - self.tx_count -= len(txs) - - def spend_utxo(self, tx_hash, tx_idx): - """Spend a UTXO and return its (hashX + tx_num + value) cache entry. - - If the UTXO is not in the cache it must be on disk. - """ - # Fast track is it being in the cache - idx_packed = pack('<H', tx_idx) - cache_value = self.utxo_cache.pop(tx_hash + idx_packed, None) - if cache_value: - return cache_value - - # Spend it from the DB - prefix = b'h' + tx_hash[:4] + idx_packed - candidates = {db_key: hashX for db_key, hashX - in self.db.utxo_db.iterator(prefix=prefix)} - - for hdb_key, hashX in candidates.items(): - tx_num_packed = hdb_key[-4:] - if len(candidates) > 1: - tx_num, = unpack('<I', tx_num_packed) - hash, height = self.db.fs_tx_hash(tx_num) - if hash != tx_hash: - assert hash is not None # Should always be found - continue - - # Value: the UTXO value as a 64-bit unsigned integer - udb_key = b'u' + hashX + hdb_key[-6:] - utxo_value_packed = self.db.utxo_db.get(udb_key) - if utxo_value_packed: - # Remove both entries for this UTXO - self.db_deletes.append(hdb_key) - self.db_deletes.append(udb_key) - return hashX + tx_num_packed + utxo_value_packed - - raise ChainError('UTXO {} / {:,d} not found in "h" table' - .format(hash_to_hex_str(tx_hash), tx_idx)) - - async def _process_prefetched_blocks(self): - """Loop forever processing blocks as they arrive.""" - while True: - if self.height == self.daemon.cached_height(): - if not self._caught_up_event.is_set(): - await self._first_caught_up() - self._caught_up_event.set() - await self.blocks_event.wait() - self.blocks_event.clear() - if self.reorg_count: - await self.reorg_chain(self.reorg_count) - self.reorg_count = 0 - else: - blocks = self.prefetcher.get_prefetched_blocks() - await self.check_and_advance_blocks(blocks) - - async def _first_caught_up(self): - self.logger.info(f'caught up to height {self.height:,d}') - # Flush everything but with first_sync->False state. - first_sync = self.db.first_sync - self.db.first_sync = False - await self.flush(True) - if first_sync: - self.logger.info(f'{torba.__version__} synced to ' - f'height {self.height:,d}') - # Reopen for serving - await self.db.open_for_serving() - - async def _first_open_dbs(self): - await self.db.open_for_sync() - self.height = self.db.db_height - self.tip = self.db.db_tip - self.tx_count = self.db.db_tx_count - - # --- External API - - async def fetch_and_process_blocks(self, caught_up_event): - """Fetch, process and index blocks from the daemon. - - Sets caught_up_event when first caught up. Flushes to disk - and shuts down cleanly if cancelled. 
- - This is mainly because if, during initial sync ElectrumX is - asked to shut down when a large number of blocks have been - processed but not written to disk, it should write those to - disk before exiting, as otherwise a significant amount of work - could be lost. - """ - self._caught_up_event = caught_up_event - try: - await self._first_open_dbs() - await asyncio.wait([ - self.prefetcher.main_loop(self.height), - self._process_prefetched_blocks() - ]) - except asyncio.CancelledError: - raise - except: - self.logger.exception("Block processing failed!") - raise - finally: - # Shut down block processing - self.logger.info('flushing to DB for a clean shutdown...') - await self.flush(True) - self.db.close() - - def force_chain_reorg(self, count): - """Force a reorg of the given number of blocks. - - Returns True if a reorg is queued, false if not caught up. - """ - if self._caught_up_event.is_set(): - self.reorg_count = count - self.blocks_event.set() - return True - return False - - -class DecredBlockProcessor(BlockProcessor): - async def calc_reorg_range(self, count): - start, count = await super().calc_reorg_range(count) - if start > 0: - # A reorg in Decred can invalidate the previous block - start -= 1 - count += 1 - return start, count - - -class NamecoinBlockProcessor(BlockProcessor): - def advance_txs(self, txs): - result = super().advance_txs(txs) - - tx_num = self.tx_count - len(txs) - script_name_hashX = self.coin.name_hashX_from_script - update_touched = self.touched.update - hashXs_by_tx = [] - append_hashXs = hashXs_by_tx.append - - for tx, tx_hash in txs: - hashXs = [] - append_hashX = hashXs.append - - # Add the new UTXOs and associate them with the name script - for idx, txout in enumerate(tx.outputs): - # Get the hashX of the name script. Ignore non-name scripts. - hashX = script_name_hashX(txout.pk_script) - if hashX: - append_hashX(hashX) - - append_hashXs(hashXs) - update_touched(hashXs) - tx_num += 1 - - self.db.history.add_unflushed(hashXs_by_tx, self.tx_count - len(txs)) - - return result diff --git a/torba/torba/server/coins.py b/torba/torba/server/coins.py deleted file mode 100644 index 78dcb6840..000000000 --- a/torba/torba/server/coins.py +++ /dev/null @@ -1,2290 +0,0 @@ -# Copyright (c) 2016-2017, Neil Booth -# Copyright (c) 2017, the ElectrumX authors -# -# All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining -# a copy of this software and associated documentation files (the -# "Software"), to deal in the Software without restriction, including -# without limitation the rights to use, copy, modify, merge, publish, -# distribute, sublicense, and/or sell copies of the Software, and to -# permit persons to whom the Software is furnished to do so, subject to -# the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -"""Module providing coin abstraction. 
- -Anything coin-specific should go in this file and be subclassed where -necessary for appropriate handling. -""" - -from collections import namedtuple -import re -import struct -from decimal import Decimal -from hashlib import sha256 -from functools import partial -import base64 -from typing import Type, List - -import torba.server.util as util -from torba.server.hash import Base58, hash160, double_sha256, hash_to_hex_str -from torba.server.hash import HASHX_LEN, hex_str_to_hash -from torba.server.script import ScriptPubKey, OpCodes -import torba.server.tx as lib_tx -import torba.server.block_processor as block_proc -from torba.server.db import DB -import torba.server.daemon as daemon -from torba.server.session import ElectrumX, DashElectrumX, SessionManager - - -Block = namedtuple("Block", "raw header transactions") -OP_RETURN = OpCodes.OP_RETURN - - -class CoinError(Exception): - """Exception raised for coin-related errors.""" - - -class Coin: - """Base class of coin hierarchy.""" - - REORG_LIMIT = 200 - # Not sure if these are coin-specific - RPC_URL_REGEX = re.compile('.+@(\\[[0-9a-fA-F:]+\\]|[^:]+)(:[0-9]+)?') - VALUE_PER_COIN = 100000000 - CHUNK_SIZE = 2016 - BASIC_HEADER_SIZE = 80 - STATIC_BLOCK_HEADERS = True - SESSIONCLS = ElectrumX - DESERIALIZER = lib_tx.Deserializer - DAEMON = daemon.Daemon - BLOCK_PROCESSOR = block_proc.BlockProcessor - SESSION_MANAGER = SessionManager - DB = DB - HEADER_VALUES = [ - 'version', 'prev_block_hash', 'merkle_root', 'timestamp', 'bits', 'nonce' - ] - HEADER_UNPACK = struct.Struct('< I 32s 32s I I I').unpack_from - MEMPOOL_HISTOGRAM_REFRESH_SECS = 500 - XPUB_VERBYTES = bytes('????', 'utf-8') - XPRV_VERBYTES = bytes('????', 'utf-8') - ENCODE_CHECK = Base58.encode_check - DECODE_CHECK = Base58.decode_check - # Peer discovery - PEER_DEFAULT_PORTS = {'t': '50001', 's': '50002'} - PEERS: List[str] = [] - - @classmethod - def lookup_coin_class(cls, name, net): - """Return a coin class given name and network. - - Raise an exception if unrecognised.""" - req_attrs = ['TX_COUNT', 'TX_COUNT_HEIGHT', 'TX_PER_BLOCK'] - for coin in util.subclasses(Coin): - if (coin.NAME.lower() == name.lower() and - coin.NET.lower() == net.lower()): - coin_req_attrs = req_attrs.copy() - missing = [attr for attr in coin_req_attrs - if not hasattr(coin, attr)] - if missing: - raise CoinError(f'coin {name} missing {missing} attributes') - return coin - raise CoinError(f'unknown coin {name} and network {net} combination') - - @classmethod - def sanitize_url(cls, url): - # Remove surrounding ws and trailing /s - url = url.strip().rstrip('/') - match = cls.RPC_URL_REGEX.match(url) - if not match: - raise CoinError(f'invalid daemon URL: "{url}"') - if match.groups()[1] is None: - url += f':{cls.RPC_PORT:d}' - if not url.startswith('http://') and not url.startswith('https://'): - url = 'http://' + url - return url + '/' - - @classmethod - def genesis_block(cls, block): - """Check the Genesis block is the right one for this coin. - - Return the block less its unspendable coinbase. - """ - header = cls.block_header(block, 0) - header_hex_hash = hash_to_hex_str(cls.header_hash(header)) - if header_hex_hash != cls.GENESIS_HASH: - raise CoinError(f'genesis block has hash {header_hex_hash} expected {cls.GENESIS_HASH}') - - return header + bytes(1) - - @classmethod - def hashX_from_script(cls, script): - """Returns a hashX from a script, or None if the script is provably - unspendable so the output can be dropped. 
- """ - if script and script[0] == OP_RETURN: - return None - return sha256(script).digest()[:HASHX_LEN] - - @staticmethod - def lookup_xverbytes(verbytes): - """Return a (is_xpub, coin_class) pair given xpub/xprv verbytes.""" - # Order means BTC testnet will override NMC testnet - for coin in util.subclasses(Coin): - if verbytes == coin.XPUB_VERBYTES: - return True, coin - if verbytes == coin.XPRV_VERBYTES: - return False, coin - raise CoinError('version bytes unrecognised') - - @classmethod - def address_to_hashX(cls, address): - """Return a hashX given a coin address.""" - return cls.hashX_from_script(cls.pay_to_address_script(address)) - - @classmethod - def P2PKH_address_from_hash160(cls, hash160): - """Return a P2PKH address given a public key.""" - assert len(hash160) == 20 - return cls.ENCODE_CHECK(cls.P2PKH_VERBYTE + hash160) - - @classmethod - def P2PKH_address_from_pubkey(cls, pubkey): - """Return a coin address given a public key.""" - return cls.P2PKH_address_from_hash160(hash160(pubkey)) - - @classmethod - def P2SH_address_from_hash160(cls, hash160): - """Return a coin address given a hash160.""" - assert len(hash160) == 20 - return cls.ENCODE_CHECK(cls.P2SH_VERBYTES[0] + hash160) - - @classmethod - def hash160_to_P2PKH_script(cls, hash160): - return ScriptPubKey.P2PKH_script(hash160) - - @classmethod - def hash160_to_P2PKH_hashX(cls, hash160): - return cls.hashX_from_script(cls.hash160_to_P2PKH_script(hash160)) - - @classmethod - def pay_to_address_script(cls, address): - """Return a pubkey script that pays to a pubkey hash. - - Pass the address (either P2PKH or P2SH) in base58 form. - """ - raw = cls.DECODE_CHECK(address) - - # Require version byte(s) plus hash160. - verbyte = -1 - verlen = len(raw) - 20 - if verlen > 0: - verbyte, hash160 = raw[:verlen], raw[verlen:] - - if verbyte == cls.P2PKH_VERBYTE: - return cls.hash160_to_P2PKH_script(hash160) - if verbyte in cls.P2SH_VERBYTES: - return ScriptPubKey.P2SH_script(hash160) - - raise CoinError(f'invalid address: {address}') - - @classmethod - def privkey_WIF(cls, privkey_bytes, compressed): - """Return the private key encoded in Wallet Import Format.""" - payload = bytearray(cls.WIF_BYTE) + privkey_bytes - if compressed: - payload.append(0x01) - return cls.ENCODE_CHECK(payload) - - @classmethod - def header_hash(cls, header): - """Given a header return hash""" - return double_sha256(header) - - @classmethod - def header_prevhash(cls, header): - """Given a header return previous hash""" - return header[4:36] - - @classmethod - def static_header_offset(cls, height): - """Given a header height return its offset in the headers file. 
- - If header sizes change at some point, this is the only code - that needs updating.""" - assert cls.STATIC_BLOCK_HEADERS - return height * cls.BASIC_HEADER_SIZE - - @classmethod - def static_header_len(cls, height): - """Given a header height return its length.""" - return (cls.static_header_offset(height + 1) - - cls.static_header_offset(height)) - - @classmethod - def block_header(cls, block, height): - """Returns the block header given a block and its height.""" - return block[:cls.static_header_len(height)] - - @classmethod - def block(cls, raw_block, height): - """Return a Block namedtuple given a raw block and its height.""" - header = cls.block_header(raw_block, height) - txs = cls.DESERIALIZER(raw_block, start=len(header)).read_tx_block() - return Block(raw_block, header, txs) - - @classmethod - def decimal_value(cls, value): - """Return the number of standard coin units as a Decimal given a - quantity of smallest units. - - For example 1 BTC is returned for 100 million satoshis. - """ - return Decimal(value) / cls.VALUE_PER_COIN - - @classmethod - def electrum_header(cls, header, height): - h = dict(zip(cls.HEADER_VALUES, cls.HEADER_UNPACK(header))) - # Add the height that is not present in the header itself - h['block_height'] = height - # Convert bytes to str - h['prev_block_hash'] = hash_to_hex_str(h['prev_block_hash']) - h['merkle_root'] = hash_to_hex_str(h['merkle_root']) - return h - - -class AuxPowMixin: - STATIC_BLOCK_HEADERS = False - DESERIALIZER = lib_tx.DeserializerAuxPow - - @classmethod - def header_hash(cls, header): - """Given a header return hash""" - return double_sha256(header[:cls.BASIC_HEADER_SIZE]) - - @classmethod - def block_header(cls, block, height): - """Return the AuxPow block header bytes""" - deserializer = cls.DESERIALIZER(block) - return deserializer.read_header(height, cls.BASIC_HEADER_SIZE) - - -class EquihashMixin: - STATIC_BLOCK_HEADERS = False - BASIC_HEADER_SIZE = 140 # Excluding Equihash solution - DESERIALIZER = lib_tx.DeserializerEquihash - HEADER_VALUES = ['version', 'prev_block_hash', 'merkle_root', 'reserved', - 'timestamp', 'bits', 'nonce'] - HEADER_UNPACK = struct.Struct('< I 32s 32s 32s I I 32s').unpack_from - - @classmethod - def electrum_header(cls, header, height): - h = dict(zip(cls.HEADER_VALUES, cls.HEADER_UNPACK(header))) - # Add the height that is not present in the header itself - h['block_height'] = height - # Convert bytes to str - h['prev_block_hash'] = hash_to_hex_str(h['prev_block_hash']) - h['merkle_root'] = hash_to_hex_str(h['merkle_root']) - h['reserved'] = hash_to_hex_str(h['reserved']) - h['nonce'] = hash_to_hex_str(h['nonce']) - return h - - @classmethod - def block_header(cls, block, height): - """Return the block header bytes""" - deserializer = cls.DESERIALIZER(block) - return deserializer.read_header(height, cls.BASIC_HEADER_SIZE) - - -class ScryptMixin: - - DESERIALIZER = lib_tx.DeserializerTxTime - HEADER_HASH = None - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - if cls.HEADER_HASH is None: - import scrypt - cls.HEADER_HASH = lambda x: scrypt.hash(x, x, 1024, 1, 1, 32) - - version, = util.unpack_le_uint32_from(header) - if version > 6: - return super().header_hash(header) - else: - return cls.HEADER_HASH(header) - - -class KomodoMixin: - P2PKH_VERBYTE = bytes.fromhex("3C") - P2SH_VERBYTES = [bytes.fromhex("55")] - WIF_BYTE = bytes.fromhex("BC") - GENESIS_HASH = ('027e3758c3a65b12aa1046462b486d0a' - '63bfa1beae327897f56c5cfb7daaae71') - DESERIALIZER = 
lib_tx.DeserializerZcash - - -class BitcoinMixin: - SHORTNAME = "BTC" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("00") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('000000000019d6689c085ae165831e93' - '4ff763ae46a2a6c172b3f1b60a8ce26f') - RPC_PORT = 8332 - - -class HOdlcoin(Coin): - NAME = "HOdlcoin" - SHORTNAME = "HODLC" - NET = "mainnet" - BASIC_HEADER_SIZE = 88 - P2PKH_VERBYTE = bytes.fromhex("28") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("a8") - GENESIS_HASH = ('008872e5582924544e5c707ee4b839bb' - '82c28a9e94e917c94b40538d5658c04b') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 258858 - TX_COUNT_HEIGHT = 382138 - TX_PER_BLOCK = 5 - - -class BitcoinCash(BitcoinMixin, Coin): - NAME = "BitcoinCash" - SHORTNAME = "BCH" - TX_COUNT = 246362688 - TX_COUNT_HEIGHT = 511484 - TX_PER_BLOCK = 400 - PEERS = [ - 'electroncash.cascharia.com s50002', - 'bch.electrumx.cash s t', - 'bccarihace4jdcnt.onion t52001 s52002', - 'abc1.hsmiths.com t60001 s60002', - 'electroncash.checksum0.com s t', - 'electrumx-cash.1209k.com s t', - 'electrum.leblancnet.us t50011 s50012', - 'electroncash.dk s t', - 'electrum.imaginary.cash s t', - ] - - -class BitcoinSegwit(BitcoinMixin, Coin): - NAME = "BitcoinSegwit" - DESERIALIZER = lib_tx.DeserializerSegWit - MEMPOOL_HISTOGRAM_REFRESH_SECS = 120 - TX_COUNT = 318337769 - TX_COUNT_HEIGHT = 524213 - TX_PER_BLOCK = 1400 - PEERS = [ - 'btc.smsys.me s995', - 'E-X.not.fyi s t', - 'elec.luggs.co s443', - 'electrum.vom-stausee.de s t', - 'electrum3.hachre.de s t', - 'electrum.hsmiths.com s t', - 'helicarrier.bauerj.eu s t', - 'hsmiths4fyqlw5xw.onion s t', - 'luggscoqbymhvnkp.onion t80', - 'ozahtqwp25chjdjd.onion s t', - 'node.arihanc.com s t', - 'arihancckjge66iv.onion s t', - ] - - -class BitcoinGold(EquihashMixin, BitcoinMixin, Coin): - CHUNK_SIZE = 252 - NAME = "BitcoinGold" - SHORTNAME = "BTG" - FORK_HEIGHT = 491407 - P2PKH_VERBYTE = bytes.fromhex("26") - P2SH_VERBYTES = [bytes.fromhex("17")] - DESERIALIZER = lib_tx.DeserializerEquihashSegWit - TX_COUNT = 265026255 - TX_COUNT_HEIGHT = 499923 - TX_PER_BLOCK = 50 - REORG_LIMIT = 1000 - RPC_PORT = 8338 - PEERS = [ - 'electrumx-eu.bitcoingold.org s50002 t50001', - 'electrumx-us.bitcoingold.org s50002 t50001', - 'electrumx-eu.btcgpu.org s50002 t50001', - 'electrumx-us.btcgpu.org s50002 t50001' - ] - - @classmethod - def header_hash(cls, header): - """Given a header return hash""" - height, = util.unpack_le_uint32_from(header, 68) - if height >= cls.FORK_HEIGHT: - return double_sha256(header) - else: - return double_sha256(header[:68] + header[100:112]) - - @classmethod - def electrum_header(cls, header, height): - h = super().electrum_header(header, height) - h['reserved'] = hash_to_hex_str(header[72:100]) - h['solution'] = hash_to_hex_str(header[140:]) - return h - - -class BitcoinGoldTestnet(BitcoinGold): - FORK_HEIGHT = 1 - SHORTNAME = "TBTG" - XPUB_VERBYTES = bytes.fromhex("043587CF") - XPRV_VERBYTES = bytes.fromhex("04358394") - P2PKH_VERBYTE = bytes.fromhex("6F") - P2SH_VERBYTES = [bytes.fromhex("C4")] - WIF_BYTE = bytes.fromhex("EF") - TX_COUNT = 0 - TX_COUNT_HEIGHT = 1 - NET = 'testnet' - RPC_PORT = 18338 - GENESIS_HASH = ('00000000e0781ebe24b91eedc293adfe' - 'a2f557b53ec379e78959de3853e6f9f6') - PEERS = [ - 'test-node1.bitcoingold.org s50002', - 'test-node2.bitcoingold.org s50002', - 'test-node3.bitcoingold.org s50002', - 'test-node1.btcgpu.org 
s50002', - 'test-node2.btcgpu.org s50002', - 'test-node3.btcgpu.org s50002' - ] - - -class BitcoinGoldRegtest(BitcoinGold): - FORK_HEIGHT = 2000 - SHORTNAME = "TBTG" - XPUB_VERBYTES = bytes.fromhex("043587CF") - XPRV_VERBYTES = bytes.fromhex("04358394") - P2PKH_VERBYTE = bytes.fromhex("6F") - P2SH_VERBYTES = [bytes.fromhex("C4")] - WIF_BYTE = bytes.fromhex("EF") - TX_COUNT = 0 - TX_COUNT_HEIGHT = 1 - NET = 'regtest' - RPC_PORT = 18444 - GENESIS_HASH = ('0f9188f13cb7b2c71f2a335e3a4fc328' - 'bf5beb436012afca590b1a11466e2206') - PEERS: List[str] = [] - - -class Emercoin(Coin): - NAME = "Emercoin" - SHORTNAME = "EMC" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("21") - P2SH_VERBYTES = [bytes.fromhex("5c")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('00000000bcccd459d036a588d1008fce' - '8da3754b205736f32ddfd35350e84c2d') - TX_COUNT = 217380620 - TX_COUNT_HEIGHT = 464000 - TX_PER_BLOCK = 1700 - VALUE_PER_COIN = 1000000 - RPC_PORT = 6662 - - DESERIALIZER = lib_tx.DeserializerTxTimeAuxPow - - PEERS: List[str] = [] - - @classmethod - def block_header(cls, block, height): - """Returns the block header given a block and its height.""" - deserializer = cls.DESERIALIZER(block) - - if deserializer.is_merged_block(): - return deserializer.read_header(height, cls.BASIC_HEADER_SIZE) - return block[:cls.static_header_len(height)] - - @classmethod - def header_hash(cls, header): - """Given a header return hash""" - return double_sha256(header[:cls.BASIC_HEADER_SIZE]) - - -class BitcoinTestnetMixin: - SHORTNAME = "XTN" - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("043587cf") - XPRV_VERBYTES = bytes.fromhex("04358394") - P2PKH_VERBYTE = bytes.fromhex("6f") - P2SH_VERBYTES = [bytes.fromhex("c4")] - WIF_BYTE = bytes.fromhex("ef") - GENESIS_HASH = ('000000000933ea01ad0ee984209779ba' - 'aec3ced90fa3f408719526f8d77f4943') - REORG_LIMIT = 8000 - TX_COUNT = 12242438 - TX_COUNT_HEIGHT = 1035428 - TX_PER_BLOCK = 21 - RPC_PORT = 18332 - PEER_DEFAULT_PORTS = {'t': '51001', 's': '51002'} - - -class BitcoinCashTestnet(BitcoinTestnetMixin, Coin): - """Bitcoin Testnet for Bitcoin Cash daemons.""" - NAME = "BitcoinCash" - PEERS = [ - 'electrum-testnet-abc.criptolayer.net s50112', - 'bchtestnet.arihanc.com t53001 s53002', - 'ciiattqkgzebpp6jofjbrkhvhwmgnsfoayljdcrve2p3qmkbv3duaoyd.onion ' - 't53001 s53002', - ] - - -class BitcoinCashRegtest(BitcoinCashTestnet): - NET = "regtest" - GENESIS_HASH = ('0f9188f13cb7b2c71f2a335e3a4fc328' - 'bf5beb436012afca590b1a11466e2206') - PEERS: List[str] = [] - TX_COUNT = 1 - TX_COUNT_HEIGHT = 1 - - -class BitcoinSegwitTestnet(BitcoinTestnetMixin, Coin): - """Bitcoin Testnet for Core bitcoind >= 0.13.1.""" - NAME = "BitcoinSegwit" - DESERIALIZER = lib_tx.DeserializerSegWit - PEERS = [ - 'electrum.akinbo.org s t', - 'he36kyperp3kbuxu.onion s t', - 'testnet.hsmiths.com t53011 s53012', - 'hsmithsxurybd7uh.onion t53011 s53012', - 'testnetnode.arihanc.com s t', - 'w3e2orjpiiv2qwem3dw66d7c4krink4nhttngkylglpqe5r22n6n5wid.onion s t', - 'testnet.qtornado.com s t', - ] - - -class BitcoinSegwitRegtest(BitcoinSegwitTestnet): - NAME = "BitcoinSegwit" - NET = "regtest" - GENESIS_HASH = ('0f9188f13cb7b2c71f2a335e3a4fc328' - 'bf5beb436012afca590b1a11466e2206') - PEERS: List[str] = [] - TX_COUNT = 1 - TX_COUNT_HEIGHT = 1 - - -class BitcoinNolnet(BitcoinCash): - """Bitcoin Unlimited nolimit testnet.""" - NET = "nolnet" - GENESIS_HASH = ('0000000057e31bd2066c939a63b7b862' - 
'3bd0f10d8c001304bdfc1a7902ae6d35') - PEERS: List[str] = [] - REORG_LIMIT = 8000 - TX_COUNT = 583589 - TX_COUNT_HEIGHT = 8617 - TX_PER_BLOCK = 50 - RPC_PORT = 28332 - PEER_DEFAULT_PORTS = {'t': '52001', 's': '52002'} - - -class Litecoin(Coin): - NAME = "Litecoin" - SHORTNAME = "LTC" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("30") - P2SH_VERBYTES = [bytes.fromhex("32"), bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("b0") - GENESIS_HASH = ('12a765e31ffd4059bada1e25190f6e98' - 'c99d9714d334efa41a195a7e7e04bfe2') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 8908766 - TX_COUNT_HEIGHT = 1105256 - TX_PER_BLOCK = 10 - RPC_PORT = 9332 - REORG_LIMIT = 800 - PEERS = [ - 'elec.luggs.co s444', - 'electrum-ltc.bysh.me s t', - 'electrum-ltc.ddns.net s t', - 'electrum-ltc.wilv.in s t', - 'electrum.cryptomachine.com p1000 s t', - 'electrum.ltc.xurious.com s t', - 'eywr5eubdbbe2laq.onion s50008 t50007', - ] - - -class LitecoinTestnet(Litecoin): - SHORTNAME = "XLT" - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("043587cf") - XPRV_VERBYTES = bytes.fromhex("04358394") - P2PKH_VERBYTE = bytes.fromhex("6f") - P2SH_VERBYTES = [bytes.fromhex("3a"), bytes.fromhex("c4")] - WIF_BYTE = bytes.fromhex("ef") - GENESIS_HASH = ('4966625a4b2851d9fdee139e56211a0d' - '88575f59ed816ff5e6a63deb4e3e29a0') - TX_COUNT = 21772 - TX_COUNT_HEIGHT = 20800 - TX_PER_BLOCK = 2 - RPC_PORT = 19332 - REORG_LIMIT = 4000 - PEER_DEFAULT_PORTS = {'t': '51001', 's': '51002'} - PEERS = [ - 'electrum-ltc.bysh.me s t', - 'electrum.ltc.xurious.com s t', - ] - - -class Viacoin(AuxPowMixin, Coin): - NAME = "Viacoin" - SHORTNAME = "VIA" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("47") - P2SH_VERBYTES = [bytes.fromhex("21")] - WIF_BYTE = bytes.fromhex("c7") - GENESIS_HASH = ('4e9b54001f9976049830128ec0331515' - 'eaabe35a70970d79971da1539a400ba1') - TX_COUNT = 113638 - TX_COUNT_HEIGHT = 3473674 - TX_PER_BLOCK = 30 - RPC_PORT = 5222 - REORG_LIMIT = 5000 - DESERIALIZER: Type = lib_tx.DeserializerAuxPowSegWit - PEERS = [ - 'vialectrum.bitops.me s t', - 'server.vialectrum.org s t', - 'vialectrum.viacoin.net s t', - 'viax1.bitops.me s t', - ] - - -class ViacoinTestnet(Viacoin): - SHORTNAME = "TVI" - NET = "testnet" - P2PKH_VERBYTE = bytes.fromhex("7f") - P2SH_VERBYTES = [bytes.fromhex("c4")] - WIF_BYTE = bytes.fromhex("ff") - GENESIS_HASH = ('00000007199508e34a9ff81e6ec0c477' - 'a4cccff2a4767a8eee39c11db367b008') - RPC_PORT = 25222 - REORG_LIMIT = 2500 - PEER_DEFAULT_PORTS = {'t': '51001', 's': '51002'} - PEERS = [ - 'vialectrum.bysh.me s t', - ] - - -class ViacoinTestnetSegWit(ViacoinTestnet): - NET = "testnet-segwit" - DESERIALIZER = lib_tx.DeserializerSegWit - - -# Source: namecoin.org -class Namecoin(AuxPowMixin, Coin): - NAME = "Namecoin" - SHORTNAME = "NMC" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("d7dd6370") - XPRV_VERBYTES = bytes.fromhex("d7dc6e31") - P2PKH_VERBYTE = bytes.fromhex("34") - P2SH_VERBYTES = [bytes.fromhex("0d")] - WIF_BYTE = bytes.fromhex("e4") - GENESIS_HASH = ('000000000062b72c5e2ceb45fbc8587e' - '807c155b0da735e6483dfba2f0a9c770') - TX_COUNT = 4415768 - TX_COUNT_HEIGHT = 329065 - TX_PER_BLOCK = 10 - PEERS = [ - 'elec.luggs.co s446', - ] - BLOCK_PROCESSOR = block_proc.NamecoinBlockProcessor - - @classmethod - def split_name_script(cls, script): - from torba.server.script import _match_ops, Script, ScriptError - - try: - ops = Script.get_ops(script) - except ScriptError: - return None, script - - match = 
_match_ops - - # Name opcodes - OP_NAME_NEW = OpCodes.OP_1 - OP_NAME_FIRSTUPDATE = OpCodes.OP_2 - OP_NAME_UPDATE = OpCodes.OP_3 - - # Opcode sequences for name operations - NAME_NEW_OPS = [OP_NAME_NEW, -1, OpCodes.OP_2DROP] - NAME_FIRSTUPDATE_OPS = [OP_NAME_FIRSTUPDATE, -1, -1, -1, - OpCodes.OP_2DROP, OpCodes.OP_2DROP] - NAME_UPDATE_OPS = [OP_NAME_UPDATE, -1, -1, OpCodes.OP_2DROP, - OpCodes.OP_DROP] - - name_script_op_count = None - name_pushdata = None - - # Detect name operations; determine count of opcodes. - # Also extract the name field -- we might use that for something in a - # future version. - if match(ops[:len(NAME_NEW_OPS)], NAME_NEW_OPS): - name_script_op_count = len(NAME_NEW_OPS) - elif match(ops[:len(NAME_FIRSTUPDATE_OPS)], NAME_FIRSTUPDATE_OPS): - name_script_op_count = len(NAME_FIRSTUPDATE_OPS) - name_pushdata = ops[1] - elif match(ops[:len(NAME_UPDATE_OPS)], NAME_UPDATE_OPS): - name_script_op_count = len(NAME_UPDATE_OPS) - name_pushdata = ops[1] - - if name_script_op_count is None: - return None, script - - # Find the end position of the name data - n = 0 - for i in range(name_script_op_count): - # Content of this loop is copied from Script.get_ops's loop - op = script[n] - n += 1 - - if op <= OpCodes.OP_PUSHDATA4: - # Raw bytes follow - if op < OpCodes.OP_PUSHDATA1: - dlen = op - elif op == OpCodes.OP_PUSHDATA1: - dlen = script[n] - n += 1 - elif op == OpCodes.OP_PUSHDATA2: - dlen, = struct.unpack('<H', script[n:n + 2]) - n += 2 - else: - dlen, = struct.unpack('<I', script[n:n + 4]) - n += 4 - if n + dlen > len(script): - raise IndexError - op = (op, script[n:n + dlen]) - n += dlen - # Strip the name data to yield the address script - address_script = script[n:] - - if name_pushdata is None: - return None, address_script - - normalized_name_op_script = bytearray() - normalized_name_op_script.append(OP_NAME_UPDATE) - normalized_name_op_script.extend(Script.push_data(name_pushdata[1])) - normalized_name_op_script.extend(Script.push_data(bytes([]))) - normalized_name_op_script.append(OpCodes.OP_2DROP) - normalized_name_op_script.append(OpCodes.OP_DROP) - normalized_name_op_script.append(OpCodes.OP_RETURN) - - return bytes(normalized_name_op_script), address_script - - @classmethod - def hashX_from_script(cls, script): - name_op_script, address_script = cls.split_name_script(script) - - return super().hashX_from_script(address_script) - - @classmethod - def address_from_script(cls, script): - name_op_script, address_script = cls.split_name_script(script) - - return super().address_from_script(address_script) - - @classmethod - def name_hashX_from_script(cls, script): - name_op_script, address_script = cls.split_name_script(script) - - if name_op_script is None: - return None - - return super().hashX_from_script(name_op_script) - - -class NamecoinTestnet(Namecoin): - NAME = "Namecoin" - SHORTNAME = "XNM" - NET = "testnet" - P2PKH_VERBYTE = bytes.fromhex("6f") - P2SH_VERBYTES = [bytes.fromhex("c4")] - WIF_BYTE = bytes.fromhex("ef") - GENESIS_HASH = ('00000007199508e34a9ff81e6ec0c477' - 'a4cccff2a4767a8eee39c11db367b008') - - -class Dogecoin(AuxPowMixin, Coin): - NAME = "Dogecoin" - SHORTNAME = "DOGE" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("02facafd") - XPRV_VERBYTES = bytes.fromhex("02fac398") - P2PKH_VERBYTE = bytes.fromhex("1e") - P2SH_VERBYTES = [bytes.fromhex("16")] - WIF_BYTE = bytes.fromhex("9e") - GENESIS_HASH = ('1a91e3dace36e2be3bf030a65679fe82' - '1aa1d6ef92e7c9902eb318182c355691') - TX_COUNT = 27583427 - TX_COUNT_HEIGHT = 1604979 - TX_PER_BLOCK = 20 - REORG_LIMIT = 2000 - - -class DogecoinTestnet(Dogecoin): - NAME = "Dogecoin" - SHORTNAME = "XDT" - NET = "testnet"
- P2PKH_VERBYTE = bytes.fromhex("71") - P2SH_VERBYTES = [bytes.fromhex("c4")] - WIF_BYTE = bytes.fromhex("f1") - GENESIS_HASH = ('bb0a78264637406b6360aad926284d54' - '4d7049f45189db5664f3c4d07350559e') - - -# Source: https://github.com/motioncrypto/motion -class Motion(Coin): - NAME = "Motion" - SHORTNAME = "XMN" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488B21E") - XPRV_VERBYTES = bytes.fromhex("0488ADE4") - GENESIS_HASH = ('000001e9dc60dd2618e91f7b90141349' - '22c374496b61c1a272519b1c39979d78') - P2PKH_VERBYTE = bytes.fromhex("32") - P2SH_VERBYTES = [bytes.fromhex("12")] - WIF_BYTE = bytes.fromhex("80") - TX_COUNT_HEIGHT = 54353 - TX_COUNT = 92701 - TX_PER_BLOCK = 4 - RPC_PORT = 3385 - SESSIONCLS = DashElectrumX - DAEMON = daemon.DashDaemon - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - import x16r_hash - return x16r_hash.getPoWHash(header) - - -# Source: https://github.com/dashpay/dash -class Dash(Coin): - NAME = "Dash" - SHORTNAME = "DASH" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("02fe52cc") - XPRV_VERBYTES = bytes.fromhex("02fe52f8") - GENESIS_HASH = ('00000ffd590b1485b3caadc19b22e637' - '9c733355108f107a430458cdf3407ab6') - P2PKH_VERBYTE = bytes.fromhex("4c") - P2SH_VERBYTES = [bytes.fromhex("10")] - WIF_BYTE = bytes.fromhex("cc") - TX_COUNT_HEIGHT = 569399 - TX_COUNT = 2157510 - TX_PER_BLOCK = 4 - RPC_PORT = 9998 - PEERS = [ - 'electrum.dash.org s t', - 'electrum.masternode.io s t', - 'electrum-drk.club s t', - 'dashcrypto.space s t', - 'electrum.dash.siampm.com s t', - 'wl4sfwq2hwxnodof.onion s t', - ] - SESSIONCLS = DashElectrumX - DAEMON = daemon.DashDaemon - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - import x11_hash - return x11_hash.getPoWHash(header) - - -class DashTestnet(Dash): - SHORTNAME = "tDASH" - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("3a805837") - XPRV_VERBYTES = bytes.fromhex("3a8061a0") - GENESIS_HASH = ('00000bafbc94add76cb75e2ec9289483' - '7288a481e5c005f6563d91623bf8bc2c') - P2PKH_VERBYTE = bytes.fromhex("8c") - P2SH_VERBYTES = [bytes.fromhex("13")] - WIF_BYTE = bytes.fromhex("ef") - TX_COUNT_HEIGHT = 101619 - TX_COUNT = 132681 - TX_PER_BLOCK = 1 - RPC_PORT = 19998 - PEER_DEFAULT_PORTS = {'t': '51001', 's': '51002'} - PEERS = [ - 'electrum.dash.siampm.com s t', - 'dasht.random.re s54002 t54001', - ] - - -class Argentum(AuxPowMixin, Coin): - NAME = "Argentum" - SHORTNAME = "ARG" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("17") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("97") - GENESIS_HASH = ('88c667bc63167685e4e4da058fffdfe8' - 'e007e5abffd6855de52ad59df7bb0bb2') - TX_COUNT = 2263089 - TX_COUNT_HEIGHT = 2050260 - TX_PER_BLOCK = 2000 - RPC_PORT = 13581 - - -class ArgentumTestnet(Argentum): - SHORTNAME = "XRG" - NET = "testnet" - P2PKH_VERBYTE = bytes.fromhex("6f") - P2SH_VERBYTES = [bytes.fromhex("c4")] - WIF_BYTE = bytes.fromhex("ef") - REORG_LIMIT = 2000 - - -class DigiByte(Coin): - NAME = "DigiByte" - SHORTNAME = "DGB" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("1E") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('7497ea1b465eb39f1c8f507bc877078f' - 'e016d6fcb6dfad3a64c98dcc6e1e8496') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 1046018 - TX_COUNT_HEIGHT = 1435000 - TX_PER_BLOCK = 1000 - RPC_PORT = 12022 - - -class DigiByteTestnet(DigiByte): - NET = "testnet" - P2PKH_VERBYTE = bytes.fromhex("6f") - P2SH_VERBYTES = [bytes.fromhex("c4")] - WIF_BYTE = 
bytes.fromhex("ef") - GENESIS_HASH = ('b5dca8039e300198e5fe7cd23bdd1728' - 'e2a444af34c447dbd0916fa3430a68c2') - RPC_PORT = 15022 - REORG_LIMIT = 2000 - - -class FairCoin(Coin): - NAME = "FairCoin" - SHORTNAME = "FAIR" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("5f") - P2SH_VERBYTES = [bytes.fromhex("24")] - WIF_BYTE = bytes.fromhex("df") - GENESIS_HASH = ('beed44fa5e96150d95d56ebd5d262578' - '1825a9407a5215dd7eda723373a0a1d7') - BASIC_HEADER_SIZE = 108 - HEADER_VALUES = ['version', 'prev_block_hash', 'merkle_root', - 'payload_hash', 'timestamp', 'creatorId'] - HEADER_UNPACK = struct.Struct('< I 32s 32s 32s I I').unpack_from - TX_COUNT = 505 - TX_COUNT_HEIGHT = 470 - TX_PER_BLOCK = 1 - RPC_PORT = 40405 - PEER_DEFAULT_PORTS = {'t': '51811', 's': '51812'} - PEERS = [ - 'electrum.faircoin.world s', - 'electrumfair.punto0.org s', - ] - - @classmethod - def block(cls, raw_block, height): - """Return a Block namedtuple given a raw block and its height.""" - if height > 0: - return super().block(raw_block, height) - else: - return Block(raw_block, cls.block_header(raw_block, height), []) - - @classmethod - def electrum_header(cls, header, height): - h = super().electrum_header(header, height) - h['payload_hash'] = hash_to_hex_str(h['payload_hash']) - return h - - -class Zcash(EquihashMixin, Coin): - NAME = "Zcash" - SHORTNAME = "ZEC" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("1CB8") - P2SH_VERBYTES = [bytes.fromhex("1CBD")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('00040fe8ec8471911baa1db1266ea15d' - 'd06b4a8a5c453883c000b031973dce08') - DESERIALIZER = lib_tx.DeserializerZcash - TX_COUNT = 329196 - TX_COUNT_HEIGHT = 68379 - TX_PER_BLOCK = 5 - RPC_PORT = 8232 - REORG_LIMIT = 800 - - -class ZcashTestnet(Zcash): - SHORTNAME = "TAZ" - NET = "testnet" - P2PKH_VERBYTE = bytes.fromhex("1D25") - P2SH_VERBYTES = [bytes.fromhex("1CBA")] - WIF_BYTE = bytes.fromhex("EF") - GENESIS_HASH = ('05a60a92d99d85997cce3b87616c089f' - '6124d7342af37106edc76126334a2c38') - TX_COUNT = 242312 - TX_COUNT_HEIGHT = 321685 - TX_PER_BLOCK = 2 - RPC_PORT = 18232 - - -class SnowGem(EquihashMixin, Coin): - NAME = "SnowGem" - SHORTNAME = "SNG" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("1C28") - P2SH_VERBYTES = [bytes.fromhex("1C2D")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('00068b35729d9d2b0c294ff1fe9af009' - '4740524311a131de40e7f705e4c29a5b') - DESERIALIZER = lib_tx.DeserializerZcash - TX_COUNT = 140698 - TX_COUNT_HEIGHT = 102802 - TX_PER_BLOCK = 2 - RPC_PORT = 16112 - REORG_LIMIT = 800 - CHUNK_SIZE = 200 - - @classmethod - def electrum_header(cls, header, height): - h = super().electrum_header(header, height) - h['n_solution'] = base64.b64encode(lib_tx.Deserializer( - header, start=140)._read_varbytes()).decode('utf8') - return h - - -class BitcoinZ(EquihashMixin, Coin): - NAME = "BitcoinZ" - SHORTNAME = "BTCZ" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("1CB8") - P2SH_VERBYTES = [bytes.fromhex("1CBD")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('f499ee3d498b4298ac6a64205b8addb7' - 'c43197e2a660229be65db8a4534d75c1') - DESERIALIZER = lib_tx.DeserializerZcash - TX_COUNT = 171976 - TX_COUNT_HEIGHT = 81323 - TX_PER_BLOCK = 3 - RPC_PORT = 1979 - REORG_LIMIT = 800 - - -class Hush(EquihashMixin, Coin): - NAME = "Hush" - SHORTNAME = "HUSH" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("1CB8") - P2SH_VERBYTES = [bytes.fromhex("1CBD")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('0003a67bc26fe564b75daf11186d3606' - '52eb435a35ba3d9d3e7e5d5f8e62dc17') - DESERIALIZER = 
lib_tx.DeserializerZcash - TX_COUNT = 329196 - TX_COUNT_HEIGHT = 68379 - TX_PER_BLOCK = 5 - RPC_PORT = 8822 - REORG_LIMIT = 800 - - -class Zclassic(EquihashMixin, Coin): - NAME = "Zclassic" - SHORTNAME = "ZCL" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("1CB8") - P2SH_VERBYTES = [bytes.fromhex("1CBD")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('0007104ccda289427919efc39dc9e4d4' - '99804b7bebc22df55f8b834301260602') - DESERIALIZER = lib_tx.DeserializerZcash - TX_COUNT = 329196 - TX_COUNT_HEIGHT = 68379 - TX_PER_BLOCK = 5 - RPC_PORT = 8023 - REORG_LIMIT = 800 - - -class Koto(Coin): - NAME = "Koto" - SHORTNAME = "KOTO" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("1836") - P2SH_VERBYTES = [bytes.fromhex("183B")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('6d424c350729ae633275d51dc3496e16' - 'cd1b1d195c164da00f39c499a2e9959e') - DESERIALIZER = lib_tx.DeserializerZcash - TX_COUNT = 158914 - TX_COUNT_HEIGHT = 67574 - TX_PER_BLOCK = 3 - RPC_PORT = 8432 - REORG_LIMIT = 800 - PEERS = [ - 'fr.kotocoin.info s t', - 'electrum.kotocoin.info s t', - ] - - -class KotoTestnet(Koto): - SHORTNAME = "TOKO" - NET = "testnet" - P2PKH_VERBYTE = bytes.fromhex("18A4") - P2SH_VERBYTES = [bytes.fromhex("1839")] - WIF_BYTE = bytes.fromhex("EF") - GENESIS_HASH = ('bf84afbde20c2d213b68b231ddb585ab' - '616ef7567226820f00d9b397d774d2f0') - TX_COUNT = 91144 - TX_COUNT_HEIGHT = 89662 - TX_PER_BLOCK = 1 - RPC_PORT = 18432 - PEER_DEFAULT_PORTS = {'t': '51001', 's': '51002'} - PEERS = [ - 'testnet.kotocoin.info s t', - ] - - -class Komodo(KomodoMixin, EquihashMixin, Coin): - NAME = "Komodo" - SHORTNAME = "KMD" - NET = "mainnet" - TX_COUNT = 693629 - TX_COUNT_HEIGHT = 491777 - TX_PER_BLOCK = 2 - RPC_PORT = 7771 - REORG_LIMIT = 800 - PEERS: List[str] = [] - - -class Monaize(KomodoMixin, EquihashMixin, Coin): - NAME = "Monaize" - SHORTNAME = "MNZ" - NET = "mainnet" - TX_COUNT = 256 - TX_COUNT_HEIGHT = 128 - TX_PER_BLOCK = 2 - RPC_PORT = 14337 - REORG_LIMIT = 800 - PEERS: List[str] = [] - - -class Einsteinium(Coin): - NAME = "Einsteinium" - SHORTNAME = "EMC2" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("21") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("b0") - GENESIS_HASH = ('4e56204bb7b8ac06f860ff1c845f03f9' - '84303b5b97eb7b42868f714611aed94b') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 2087559 - TX_COUNT_HEIGHT = 1358517 - TX_PER_BLOCK = 2 - RPC_PORT = 41879 - REORG_LIMIT = 2000 - - -class Blackcoin(ScryptMixin, Coin): - NAME = "Blackcoin" - SHORTNAME = "BLK" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("19") - P2SH_VERBYTES = [bytes.fromhex("55")] - WIF_BYTE = bytes.fromhex("99") - GENESIS_HASH = ('000001faef25dec4fbcf906e6242621d' - 'f2c183bf232f263d0ba5b101911e4563') - DAEMON = daemon.LegacyRPCDaemon - TX_COUNT = 4594999 - TX_COUNT_HEIGHT = 1667070 - TX_PER_BLOCK = 3 - RPC_PORT = 15715 - REORG_LIMIT = 5000 - - -class Bitbay(ScryptMixin, Coin): - NAME = "Bitbay" - SHORTNAME = "BAY" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("19") - P2SH_VERBYTES = [bytes.fromhex("55")] - WIF_BYTE = bytes.fromhex("99") - GENESIS_HASH = ('0000075685d3be1f253ce777174b1594' - '354e79954d2a32a6f77fe9cba00e6467') - TX_COUNT = 4594999 - TX_COUNT_HEIGHT = 1667070 - TX_PER_BLOCK = 3 - RPC_PORT = 19914 - REORG_LIMIT = 5000 - - -class Peercoin(Coin): - NAME = "Peercoin" - SHORTNAME = "PPC" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("37") - P2SH_VERBYTES = [bytes.fromhex("75")] - WIF_BYTE = bytes.fromhex("b7") - GENESIS_HASH = ('0000000032fe677166d54963b62a4677' - 
'd8957e87c508eaa4fd7eb1c880cd27e3') - DESERIALIZER = lib_tx.DeserializerTxTime - DAEMON = daemon.LegacyRPCDaemon - TX_COUNT = 1207356 - TX_COUNT_HEIGHT = 306425 - TX_PER_BLOCK = 4 - RPC_PORT = 9902 - REORG_LIMIT = 5000 - - -class Reddcoin(Coin): - NAME = "Reddcoin" - SHORTNAME = "RDD" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("3d") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("bd") - GENESIS_HASH = ('b868e0d95a3c3c0e0dadc67ee587aaf9' - 'dc8acbf99e3b4b3110fad4eb74c1decc') - DESERIALIZER = lib_tx.DeserializerReddcoin - TX_COUNT = 5413508 - TX_COUNT_HEIGHT = 1717382 - TX_PER_BLOCK = 3 - RPC_PORT = 45443 - - -class TokenPay(ScryptMixin, Coin): - NAME = "TokenPay" - SHORTNAME = "TPAY" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("41") - P2SH_VERBYTES = [bytes.fromhex("7e")] - WIF_BYTE = bytes.fromhex("b3") - GENESIS_HASH = ('000008b71ab32e585a23f0de642dc113' - '740144e94c0ece047751e9781f953ae9') - DESERIALIZER = lib_tx.DeserializerTokenPay - DAEMON = daemon.LegacyRPCDaemon - TX_COUNT = 147934 - TX_COUNT_HEIGHT = 73967 - TX_PER_BLOCK = 100 - RPC_PORT = 8800 - REORG_LIMIT = 500 - XPUB_VERBYTES = bytes.fromhex("0488B21E") - XPRV_VERBYTES = bytes.fromhex("0488ADE4") - - PEERS = [ - "electrum-us.tpay.ai s", - "electrum-eu.tpay.ai s", - ] - - -class Vertcoin(Coin): - NAME = "Vertcoin" - SHORTNAME = "VTC" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488B21E") - XPRV_VERBYTES = bytes.fromhex("0488ADE4") - P2PKH_VERBYTE = bytes.fromhex("47") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('4d96a915f49d40b1e5c2844d1ee2dccb' - '90013a990ccea12c492d22110489f0c4') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 2383423 - TX_COUNT_HEIGHT = 759076 - TX_PER_BLOCK = 3 - RPC_PORT = 5888 - REORG_LIMIT = 1000 - - -class Monacoin(Coin): - NAME = "Monacoin" - SHORTNAME = "MONA" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488B21E") - XPRV_VERBYTES = bytes.fromhex("0488ADE4") - P2PKH_VERBYTE = bytes.fromhex("32") - P2SH_VERBYTES = [bytes.fromhex("37"), bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("B0") - GENESIS_HASH = ('ff9f1c0116d19de7c9963845e129f9ed' - '1bfc0b376eb54fd7afa42e0d418c8bb6') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 2568580 - TX_COUNT_HEIGHT = 1029766 - TX_PER_BLOCK = 2 - RPC_PORT = 9402 - REORG_LIMIT = 1000 - PEERS = [ - 'electrumx.tamami-foundation.org s t', - 'electrumx2.tamami-foundation.org s t', - 'electrumx3.tamami-foundation.org s t', - 'electrumx1.monacoin.nl s t', - 'electrumx2.monacoin.nl s t', - 'electrumx1.monacoin.ninja s t', - 'electrumx2.monacoin.ninja s t', - 'electrumx2.movsign.info s t', - 'electrum-mona.bitbank.cc s t', - 'ri7rzlmdaf4eqbza.onion s t', - ] - - -class MonacoinTestnet(Monacoin): - SHORTNAME = "XMN" - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("043587CF") - XPRV_VERBYTES = bytes.fromhex("04358394") - P2PKH_VERBYTE = bytes.fromhex("6F") - P2SH_VERBYTES = [bytes.fromhex("75"), bytes.fromhex("C4")] - WIF_BYTE = bytes.fromhex("EF") - GENESIS_HASH = ('a2b106ceba3be0c6d097b2a6a6aacf9d' - '638ba8258ae478158f449c321061e0b2') - TX_COUNT = 83602 - TX_COUNT_HEIGHT = 83252 - TX_PER_BLOCK = 1 - RPC_PORT = 19402 - REORG_LIMIT = 1000 - PEER_DEFAULT_PORTS = {'t': '51001', 's': '51002'} - PEERS = [ - 'electrumx1.testnet.monacoin.ninja s t', - 'electrumx1.testnet.monacoin.nl s t', - ] - - -class Crown(AuxPowMixin, Coin): - NAME = "Crown" - SHORTNAME = "CRW" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - 
P2PKH_VERBYTE = bytes.fromhex("00") - P2SH_VERBYTES = [bytes.fromhex("1c")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('0000000085370d5e122f64f4ab19c686' - '14ff3df78c8d13cb814fd7e69a1dc6da') - TX_COUNT = 13336629 - TX_COUNT_HEIGHT = 1268206 - TX_PER_BLOCK = 10 - RPC_PORT = 9341 - REORG_LIMIT = 1000 - PEERS = [ - 'sgp-crwseed.crowndns.info s t', - 'blr-crwseed.crowndns.info s t', - 'sfo-crwseed.crowndns.info s t', - 'nyc-crwseed.crowndns.info s t', - 'ams-crwseed.crowndns.info s t', - 'tor-crwseed.crowndns.info s t', - 'lon-crwseed.crowndns.info s t', - 'fra-crwseed.crowndns.info s t', - ] - - -class Fujicoin(Coin): - NAME = "Fujicoin" - SHORTNAME = "FJC" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("24") - P2SH_VERBYTES = [bytes.fromhex("10")] - WIF_BYTE = bytes.fromhex("a4") - GENESIS_HASH = ('adb6d9cfd74075e7f91608add4bd2a2e' - 'a636f70856183086842667a1597714a0') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 170478 - TX_COUNT_HEIGHT = 1521676 - TX_PER_BLOCK = 1 - RPC_PORT = 3776 - REORG_LIMIT = 1000 - - -class Neblio(ScryptMixin, Coin): - NAME = "Neblio" - SHORTNAME = "NEBL" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("35") - P2SH_VERBYTES = [bytes.fromhex("70")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('7286972be4dbc1463d256049b7471c25' - '2e6557e222cab9be73181d359cd28bcc') - TX_COUNT = 23675 - TX_COUNT_HEIGHT = 22785 - TX_PER_BLOCK = 1 - RPC_PORT = 6326 - REORG_LIMIT = 1000 - - -class Bitzeny(Coin): - NAME = "Bitzeny" - SHORTNAME = "ZNY" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("51") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('000009f7e55e9e3b4781e22bd87a7cfa' - '4acada9e4340d43ca738bf4e9fb8f5ce') - ESTIMATE_FEE = 0.001 - RELAY_FEE = 0.001 - DAEMON = daemon.FakeEstimateFeeDaemon - TX_COUNT = 1408733 - TX_COUNT_HEIGHT = 1015115 - TX_PER_BLOCK = 1 - RPC_PORT = 9252 - REORG_LIMIT = 1000 - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - import zny_yescrypt - return zny_yescrypt.getPoWHash(header) - - -class CanadaeCoin(AuxPowMixin, Coin): - NAME = "CanadaeCoin" - SHORTNAME = "CDN" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("1C") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("9c") - GENESIS_HASH = ('863626dadaef221e2e2f30ff3dacae44' - 'cabdae9e0028058072181b3fb675d94a') - ESTIMATE_FEE = 0.0001 - RELAY_FEE = 0.0001 - DAEMON = daemon.FakeEstimateFeeDaemon - TX_COUNT = 3455905 - TX_COUNT_HEIGHT = 3645419 - TX_PER_BLOCK = 1 - RPC_PORT = 34330 - REORG_LIMIT = 1000 - - -class Denarius(Coin): - NAME = "Denarius" - SHORTNAME = "DNR" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("1E") # Address starts with a D - P2SH_VERBYTES = [bytes.fromhex("5A")] - WIF_BYTE = bytes.fromhex("9E") # WIF starts with a 6 - GENESIS_HASH = ('00000d5dbbda01621cfc16bbc1f9bf32' - '64d641a5dbf0de89fd0182c2c4828fcd') - DESERIALIZER = lib_tx.DeserializerTxTime - TX_COUNT = 4230 - RPC_PORT = 32339 - ESTIMATE_FEE = 0.00001 - RELAY_FEE = 0.00001 - DAEMON = daemon.FakeEstimateFeeDaemon - TX_COUNT_HEIGHT = 306187 - TX_PER_BLOCK = 
4000 - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - import tribus_hash - return tribus_hash.getPoWHash(header) - - -class DenariusTestnet(Denarius): - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("043587cf") - XPRV_VERBYTES = bytes.fromhex("04358394") - P2PKH_VERBYTE = bytes.fromhex("12") - P2SH_VERBYTES = [bytes.fromhex("74")] - WIF_BYTE = bytes.fromhex("ef") - GENESIS_HASH = ('000086bfe8264d241f7f8e5393f74778' - '4b8ca2aa98bdd066278d590462a4fdb4') - RPC_PORT = 32338 - REORG_LIMIT = 2000 - - -class Sibcoin(Dash): - NAME = "Sibcoin" - SHORTNAME = "SIB" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("3F") - P2SH_VERBYTES = [bytes.fromhex("28")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('00000c492bf73490420868bc577680bf' - 'c4c60116e7e85343bc624787c21efa4c') - DAEMON = daemon.DashDaemon - TX_COUNT = 1000 - TX_COUNT_HEIGHT = 10000 - TX_PER_BLOCK = 1 - RPC_PORT = 1944 - REORG_LIMIT = 1000 - PEERS: List[str] = [] - - @classmethod - def header_hash(cls, header): - """ - Given a header return the hash for sibcoin. - Need to download `x11_gost_hash` module - Source code: https://github.com/ivansib/x11_gost_hash - """ - import x11_gost_hash - return x11_gost_hash.getPoWHash(header) - - -class Chips(Coin): - NAME = "Chips" - SHORTNAME = "CHIPS" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("3c") - P2SH_VERBYTES = [bytes.fromhex("55")] - WIF_BYTE = bytes.fromhex("bc") - GENESIS_HASH = ('0000006e75f6aa0efdbf7db03132aa4e' - '4d0c84951537a6f5a7c39a0a9d30e1e7') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 145290 - TX_COUNT_HEIGHT = 318637 - TX_PER_BLOCK = 2 - RPC_PORT = 57776 - REORG_LIMIT = 800 - - -class Feathercoin(Coin): - NAME = "Feathercoin" - SHORTNAME = "FTC" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488BC26") - XPRV_VERBYTES = bytes.fromhex("0488DAEE") - P2PKH_VERBYTE = bytes.fromhex("0E") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("8E") - GENESIS_HASH = ('12a765e31ffd4059bada1e25190f6e98' - 'c99d9714d334efa41a195a7e7e04bfe2') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 3170843 - TX_COUNT_HEIGHT = 1981777 - TX_PER_BLOCK = 2 - RPC_PORT = 9337 - REORG_LIMIT = 2000 - PEERS = [ - 'electrumx-ch-1.feathercoin.ch s t', - ] - - -class UFO(Coin): - NAME = "UniformFiscalObject" - SHORTNAME = "UFO" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488B21E") - XPRV_VERBYTES = bytes.fromhex("0488ADE4") - P2PKH_VERBYTE = bytes.fromhex("1B") - P2SH_VERBYTES = [bytes.fromhex("44")] - WIF_BYTE = bytes.fromhex("9B") - GENESIS_HASH = ('ba1d39b4928ab03d813d952daf65fb77' - '97fcf538a9c1b8274f4edc8557722d13') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 1608926 - TX_COUNT_HEIGHT = 1300154 - TX_PER_BLOCK = 2 - RPC_PORT = 9888 - REORG_LIMIT = 2000 - PEERS = [ - 'electrumx1.ufobject.com s t', - ] - - -class Newyorkcoin(AuxPowMixin, Coin): - NAME = "Newyorkcoin" - SHORTNAME = "NYC" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("3c") - P2SH_VERBYTES = [bytes.fromhex("16")] - WIF_BYTE = bytes.fromhex("bc") - GENESIS_HASH = ('5597f25c062a3038c7fd815fe46c67de' - 'dfcb3c839fbc8e01ed4044540d08fe48') - TX_COUNT = 5161944 - TX_COUNT_HEIGHT = 3948743 - TX_PER_BLOCK = 2 - REORG_LIMIT = 2000 - - -class NewyorkcoinTestnet(Newyorkcoin): - SHORTNAME = "tNYC" - NET = "testnet" - P2PKH_VERBYTE = bytes.fromhex("71") - P2SH_VERBYTES = [bytes.fromhex("c4")] - WIF_BYTE = bytes.fromhex("f1") - GENESIS_HASH = 
('24463e4d3c625b0a9059f309044c2cf0' - 'd7e196cf2a6ecce901f24f681be33c8f') - TX_COUNT = 5161944 - TX_COUNT_HEIGHT = 3948743 - TX_PER_BLOCK = 2 - REORG_LIMIT = 2000 - - -class Bitcore(BitcoinMixin, Coin): - NAME = "Bitcore" - SHORTNAME = "BTX" - P2PKH_VERBYTE = bytes.fromhex("03") - P2SH_VERBYTES = [bytes.fromhex("7D")] - WIF_BYTE = bytes.fromhex("80") - DESERIALIZER = lib_tx.DeserializerSegWit - GENESIS_HASH = ('604148281e5c4b7f2487e5d03cd60d8e' - '6f69411d613f6448034508cea52e9574') - TX_COUNT = 126979 - TX_COUNT_HEIGHT = 126946 - TX_PER_BLOCK = 2 - RPC_PORT = 8556 - - -class GameCredits(Coin): - NAME = "GameCredits" - SHORTNAME = "GAME" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("26") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("a6") - GENESIS_HASH = ('91ec5f25ee9a0ffa1af7d4da4db9a552' - '228dd2dc77cdb15b738be4e1f55f30ee') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 316796 - TX_COUNT_HEIGHT = 2040250 - TX_PER_BLOCK = 2 - RPC_PORT = 40001 - REORG_LIMIT = 1000 - - -class Machinecoin(Coin): - NAME = "Machinecoin" - SHORTNAME = "MAC" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("32") - P2SH_VERBYTES = [bytes.fromhex("26"), bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("b2") - GENESIS_HASH = ('6a1f879bcea5471cbfdee1fd0cb2ddcc' - '4fed569a500e352d41de967703e83172') - DESERIALIZER = lib_tx.DeserializerSegWit - TX_COUNT = 137641 - TX_COUNT_HEIGHT = 513020 - TX_PER_BLOCK = 2 - RPC_PORT = 40332 - REORG_LIMIT = 800 - - -class BitcoinAtom(Coin): - NAME = "BitcoinAtom" - SHORTNAME = "BCA" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("17") - P2SH_VERBYTES = [bytes.fromhex("0a")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('000000000019d6689c085ae165831e93' - '4ff763ae46a2a6c172b3f1b60a8ce26f') - STATIC_BLOCK_HEADERS = False - DESERIALIZER = lib_tx.DeserializerBitcoinAtom - HEADER_SIZE_POST_FORK = 84 - BLOCK_PROOF_OF_STAKE = 0x01 - BLOCK_PROOF_OF_STAKE_FLAGS = b'\x01\x00\x00\x00' - TX_COUNT = 295158744 - TX_COUNT_HEIGHT = 589197 - TX_PER_BLOCK = 10 - RPC_PORT = 9136 - REORG_LIMIT = 5000 - - @classmethod - def header_hash(cls, header): - """Given a header return hash""" - header_to_be_hashed = header[:cls.BASIC_HEADER_SIZE] - # New block header format has some extra flags in the end - if len(header) == cls.HEADER_SIZE_POST_FORK: - flags, = util.unpack_le_uint32_from(header, len(header) - 4) - # Proof of work blocks have special serialization - if flags & cls.BLOCK_PROOF_OF_STAKE != 0: - header_to_be_hashed += cls.BLOCK_PROOF_OF_STAKE_FLAGS - - return double_sha256(header_to_be_hashed) - - @classmethod - def block_header(cls, block, height): - """Return the block header bytes""" - deserializer = cls.DESERIALIZER(block) - return deserializer.read_header(height, cls.BASIC_HEADER_SIZE) - - -class Decred(Coin): - NAME = "Decred" - SHORTNAME = "DCR" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("02fda926") - XPRV_VERBYTES = bytes.fromhex("02fda4e8") - P2PKH_VERBYTE = bytes.fromhex("073f") - P2SH_VERBYTES = [bytes.fromhex("071a")] - WIF_BYTE = bytes.fromhex("22de") - GENESIS_HASH = ('298e5cc3d985bfe7f81dc135f360abe0' - '89edd4396b86d2de66b0cef42b21d980') - BASIC_HEADER_SIZE = 180 - HEADER_HASH = lib_tx.DeserializerDecred.blake256 - DESERIALIZER = lib_tx.DeserializerDecred - DAEMON = daemon.DecredDaemon - BLOCK_PROCESSOR = block_proc.DecredBlockProcessor - ENCODE_CHECK = partial(Base58.encode_check, - hash_fn=lib_tx.DeserializerDecred.blake256d) - DECODE_CHECK = 
partial(Base58.decode_check, - hash_fn=lib_tx.DeserializerDecred.blake256d) - HEADER_VALUES = ['version', 'prev_block_hash', 'merkle_root', 'stake_root', - 'vote_bits', 'final_state', 'voters', 'fresh_stake', - 'revocations', 'pool_size', 'bits', 'sbits', - 'block_height', 'size', 'timestamp', 'nonce', - 'extra_data', 'stake_version'] - HEADER_UNPACK = struct.Struct( - '< i 32s 32s 32s H 6s H B B I I Q I I I I 32s I').unpack_from - TX_COUNT = 4629388 - TX_COUNT_HEIGHT = 260628 - TX_PER_BLOCK = 17 - REORG_LIMIT = 1000 - RPC_PORT = 9109 - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - return cls.HEADER_HASH(header) - - @classmethod - def block(cls, raw_block, height): - """Return a Block namedtuple given a raw block and its height.""" - if height > 0: - return super().block(raw_block, height) - else: - return Block(raw_block, cls.block_header(raw_block, height), []) - - @classmethod - def electrum_header(cls, header, height): - h = super().electrum_header(header, height) - h['stake_root'] = hash_to_hex_str(h['stake_root']) - h['final_state'] = hash_to_hex_str(h['final_state']) - h['extra_data'] = hash_to_hex_str(h['extra_data']) - return h - - -class DecredTestnet(Decred): - SHORTNAME = "tDCR" - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("043587d1") - XPRV_VERBYTES = bytes.fromhex("04358397") - P2PKH_VERBYTE = bytes.fromhex("0f21") - P2SH_VERBYTES = [bytes.fromhex("0efc")] - WIF_BYTE = bytes.fromhex("230e") - GENESIS_HASH = ( - 'a649dce53918caf422e9c711c858837e08d626ecfcd198969b24f7b634a49bac') - BASIC_HEADER_SIZE = 180 - ALLOW_ADVANCING_ERRORS = True - TX_COUNT = 217380620 - TX_COUNT_HEIGHT = 464000 - TX_PER_BLOCK = 1800 - REORG_LIMIT = 1000 - RPC_PORT = 19109 - - -class Axe(Dash): - NAME = "Axe" - SHORTNAME = "AXE" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("02fe52cc") - XPRV_VERBYTES = bytes.fromhex("02fe52f8") - P2PKH_VERBYTE = bytes.fromhex("37") - P2SH_VERBYTES = [bytes.fromhex("10")] - WIF_BYTE = bytes.fromhex("cc") - GENESIS_HASH = ('00000c33631ca6f2f61368991ce2dc03' - '306b5bb50bf7cede5cfbba6db38e52e6') - DAEMON = daemon.DashDaemon - TX_COUNT = 18405 - TX_COUNT_HEIGHT = 30237 - TX_PER_BLOCK = 1 - RPC_PORT = 9337 - REORG_LIMIT = 1000 - PEERS: List[str] = [] - - @classmethod - def header_hash(cls, header): - """ - Given a header return the hash for AXE. - Need to download `axe_hash` module - Source code: https://github.com/AXErunners/axe_hash - """ - import x11_hash - return x11_hash.getPoWHash(header) - - -class Xuez(Coin): - NAME = "Xuez" - SHORTNAME = "XUEZ" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("022d2533") - XPRV_VERBYTES = bytes.fromhex("0221312b") - P2PKH_VERBYTE = bytes.fromhex("48") - P2SH_VERBYTES = [bytes.fromhex("12")] - WIF_BYTE = bytes.fromhex("d4") - GENESIS_HASH = ('000000e1febc39965b055e8e0117179a' - '4d18e24e7aaa0c69864c4054b4f29445') - TX_COUNT = 30000 - TX_COUNT_HEIGHT = 15000 - TX_PER_BLOCK = 1 - RPC_PORT = 41799 - REORG_LIMIT = 1000 - BASIC_HEADER_SIZE = 112 - PEERS: List[str] = [] - - @classmethod - def header_hash(cls, header): - """ - Given a header return the hash for Xuez. 
- Need to download `xevan_hash` module - Source code: https://github.com/xuez/xuez - """ - version, = util.unpack_le_uint32_from(header) - - import xevan_hash - - if version == 1: - return xevan_hash.getPoWHash(header[:80]) - else: - return xevan_hash.getPoWHash(header) - - @classmethod - def electrum_header(cls, header, height): - h = super().electrum_header(header, height) - if h['version'] > 1: - h['nAccumulatorCheckpoint'] = hash_to_hex_str(header[80:]) - return h - - -class Pac(Coin): - NAME = "PAC" - SHORTNAME = "PAC" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488B21E") - XPRV_VERBYTES = bytes.fromhex("0488ADE4") - GENESIS_HASH = ('00000354655ff039a51273fe61d3b493' - 'bd2897fe6c16f732dbc4ae19f04b789e') - P2PKH_VERBYTE = bytes.fromhex("37") - P2SH_VERBYTES = [bytes.fromhex("0A")] - WIF_BYTE = bytes.fromhex("CC") - TX_COUNT_HEIGHT = 14939 - TX_COUNT = 23708 - TX_PER_BLOCK = 2 - RPC_PORT = 7111 - PEERS = [ - 'electrum.paccoin.io s t', - 'electro-pac.paccoin.io s t' - ] - SESSIONCLS = DashElectrumX - DAEMON = daemon.DashDaemon - ESTIMATE_FEE = 0.00001 - RELAY_FEE = 0.00001 - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - import x11_hash - return x11_hash.getPoWHash(header) - - -class PacTestnet(Pac): - SHORTNAME = "tPAC" - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("043587CF") - XPRV_VERBYTES = bytes.fromhex("04358394") - GENESIS_HASH = ('00000da63bd9478b655ef6bf1bf76cd9' - 'af05202ab68643f9091e049b2b5280ed') - P2PKH_VERBYTE = bytes.fromhex("78") - P2SH_VERBYTES = [bytes.fromhex("0E")] - WIF_BYTE = bytes.fromhex("EF") - TX_COUNT_HEIGHT = 16275 - TX_COUNT = 16275 - TX_PER_BLOCK = 1 - RPC_PORT = 17111 - - -class Polis(Coin): - NAME = "Polis" - SHORTNAME = "POLIS" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("03E25D7E") - XPRV_VERBYTES = bytes.fromhex("03E25945") - GENESIS_HASH = ('000009701eb781a8113b1af1d814e2f0' - '60f6408a2c990db291bc5108a1345c1e') - P2PKH_VERBYTE = bytes.fromhex("37") - P2SH_VERBYTES = [bytes.fromhex("38")] - WIF_BYTE = bytes.fromhex("3c") - TX_COUNT_HEIGHT = 111111 - TX_COUNT = 256128 - TX_PER_BLOCK = 4 - RPC_PORT = 24127 - PEERS = [ - 'electrum1-polis.polispay.org', - 'electrum2-polis.polispay.org' - ] - SESSIONCLS = DashElectrumX - DAEMON = daemon.DashDaemon - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - import x11_hash - return x11_hash.getPoWHash(header) - - -class ColossusXT(Coin): - NAME = "ColossusXT" - SHORTNAME = "COLX" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("022D2533") - XPRV_VERBYTES = bytes.fromhex("0221312B") - GENESIS_HASH = ('a0ce8206c908357008c1b9a8ba2813af' - 'f0989ca7f72d62b14e652c55f02b4f5c') - P2PKH_VERBYTE = bytes.fromhex("1E") - P2SH_VERBYTES = [bytes.fromhex("0D")] - WIF_BYTE = bytes.fromhex("D4") - TX_COUNT_HEIGHT = 356500 - TX_COUNT = 761041 - TX_PER_BLOCK = 4 - RPC_PORT = 51473 - PEERS = [ - 'electrum1-colx.polispay.org', - 'electrum2-colx.polispay.org' - ] - SESSIONCLS = DashElectrumX - DAEMON = daemon.DashDaemon - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - import quark_hash - return quark_hash.getPoWHash(header) - - -class GoByte(Coin): - NAME = "GoByte" - SHORTNAME = "GBX" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488B21E") - XPRV_VERBYTES = bytes.fromhex("0488ADE4") - GENESIS_HASH = ('0000033b01055cf8df90b01a14734cae' - '92f7039b9b0e48887b4e33a469d7bc07') - P2PKH_VERBYTE = bytes.fromhex("26") - P2SH_VERBYTES = [bytes.fromhex("0A")] - WIF_BYTE = bytes.fromhex("C6") - 
TX_COUNT_HEIGHT = 115890 - TX_COUNT = 245030 - TX_PER_BLOCK = 4 - RPC_PORT = 12454 - PEERS = [ - 'electrum1-gbx.polispay.org', - 'electrum2-gbx.polispay.org' - ] - SESSIONCLS = DashElectrumX - DAEMON = daemon.DashDaemon - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - import neoscrypt - return neoscrypt.getPoWHash(header) - - -class Monoeci(Coin): - NAME = "Monoeci" - SHORTNAME = "XMCC" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488B21E") - XPRV_VERBYTES = bytes.fromhex("0488ADE4") - GENESIS_HASH = ('0000005be1eb05b05fb45ae38ee9c144' - '1514a65343cd146100a574de4278f1a3') - P2PKH_VERBYTE = bytes.fromhex("32") - P2SH_VERBYTES = [bytes.fromhex("49")] - WIF_BYTE = bytes.fromhex("4D") - TX_COUNT_HEIGHT = 140000 - TX_COUNT = 140000 - TX_PER_BLOCK = 4 - RPC_PORT = 24156 - PEERS = [ - 'electrum1-gbx.polispay.org', - 'electrum2-gbx.polispay.org' - ] - SESSIONCLS = DashElectrumX - DAEMON = daemon.DashDaemon - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - import x11_hash - return x11_hash.getPoWHash(header) - - -class Minexcoin(EquihashMixin, Coin): - NAME = "Minexcoin" - SHORTNAME = "MNX" - NET = "mainnet" - P2PKH_VERBYTE = bytes.fromhex("4b") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('490a36d9451a55ed197e34aca7414b35' - 'd775baa4a8e896f1c577f65ce2d214cb') - STATIC_BLOCK_HEADERS = True - BASIC_HEADER_SIZE = 209 - HEADER_SIZE_NO_SOLUTION = 140 - TX_COUNT = 327963 - TX_COUNT_HEIGHT = 74495 - TX_PER_BLOCK = 5 - RPC_PORT = 8022 - CHUNK_SIZE = 960 - PEERS = [ - 'elex01-ams.turinex.eu s t', - 'eu.minexpool.nl s t' - ] - - @classmethod - def electrum_header(cls, header, height): - h = super().electrum_header(header, height) - h['solution'] = hash_to_hex_str(header[cls.HEADER_SIZE_NO_SOLUTION:]) - return h - - @classmethod - def block_header(cls, block, height): - """Return the block header bytes""" - deserializer = cls.DESERIALIZER(block) - return deserializer.read_header(height, cls.HEADER_SIZE_NO_SOLUTION) - - -class Groestlcoin(Coin): - NAME = "Groestlcoin" - SHORTNAME = "GRS" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("24") - P2SH_VERBYTES = [bytes.fromhex("05")] - WIF_BYTE = bytes.fromhex("80") - GENESIS_HASH = ('00000ac5927c594d49cc0bdb81759d0d' - 'a8297eb614683d3acb62f0703b639023') - DESERIALIZER = lib_tx.DeserializerGroestlcoin - TX_COUNT = 115900 - TX_COUNT_HEIGHT = 1601528 - TX_PER_BLOCK = 5 - RPC_PORT = 1441 - PEERS = [ - 'electrum1.groestlcoin.org s t', - 'electrum2.groestlcoin.org s t', - '6brsrbiinpc32tfc.onion t', - 'xkj42efxrcy6vbfw.onion t', - ] - - def grshash(data): - import groestlcoin_hash - return groestlcoin_hash.getHash(data, len(data)) - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - return cls.grshash(header) - - ENCODE_CHECK = partial(Base58.encode_check, hash_fn=grshash) - DECODE_CHECK = partial(Base58.decode_check, hash_fn=grshash) - - -class GroestlcoinTestnet(Groestlcoin): - SHORTNAME = "TGRS" - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("043587cf") - XPRV_VERBYTES = bytes.fromhex("04358394") - P2PKH_VERBYTE = bytes.fromhex("6f") - P2SH_VERBYTES = [bytes.fromhex("c4")] - WIF_BYTE = bytes.fromhex("ef") - GENESIS_HASH = ('000000ffbb50fc9898cdd36ec163e6ba' - '23230164c0052a28876255b7dcf2cd36') - RPC_PORT = 17766 - PEERS = [ - 'electrum-test1.groestlcoin.org s t', - 'electrum-test2.groestlcoin.org s 
t', - '7frvhgofuf522b5i.onion t', - 'aocojvqcybdoxekv.onion t', - ] - - -class Pivx(Coin): - NAME = "Pivx" - SHORTNAME = "PIVX" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("022D2533") - XPRV_VERBYTES = bytes.fromhex("0221312B") - P2PKH_VERBYTE = bytes.fromhex("1e") - P2SH_VERBYTES = [bytes.fromhex("0d")] - WIF_BYTE = bytes.fromhex("d4") - GENESIS_HASH = ('0000041e482b9b9691d98eefb4847340' - '5c0b8ec31b76df3797c74a78680ef818') - BASIC_HEADER_SIZE = 80 - HDR_V4_SIZE = 112 - HDR_V4_HEIGHT = 863787 - HDR_V4_START_OFFSET = HDR_V4_HEIGHT * BASIC_HEADER_SIZE - TX_COUNT = 2930206 - TX_COUNT_HEIGHT = 1299212 - TX_PER_BLOCK = 2 - RPC_PORT = 51473 - - @classmethod - def static_header_offset(cls, height): - assert cls.STATIC_BLOCK_HEADERS - if height >= cls.HDR_V4_HEIGHT: - relative_v4_offset = (height - cls.HDR_V4_HEIGHT) * cls.HDR_V4_SIZE - return cls.HDR_V4_START_OFFSET + relative_v4_offset - else: - return height * cls.BASIC_HEADER_SIZE - - @classmethod - def header_hash(cls, header): - version, = util.unpack_le_uint32_from(header) - if version >= 4: - return super().header_hash(header) - else: - import quark_hash - return quark_hash.getPoWHash(header) - - -class PivxTestnet(Pivx): - SHORTNAME = "tPIVX" - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("3a8061a0") - XPRV_VERBYTES = bytes.fromhex("3a805837") - P2PKH_VERBYTE = bytes.fromhex("8B") - P2SH_VERBYTES = [bytes.fromhex("13")] - WIF_BYTE = bytes.fromhex("EF") - GENESIS_HASH = ( - '0000041e482b9b9691d98eefb48473405c0b8ec31b76df3797c74a78680ef818') - BASIC_HEADER_SIZE = 80 - HDR_V4_SIZE = 112 - HDR_V4_HEIGHT = 863787 - HDR_V4_START_OFFSET = HDR_V4_HEIGHT * BASIC_HEADER_SIZE - TX_COUNT = 2157510 - TX_COUNT_HEIGHT = 569399 - TX_PER_BLOCK = 2 - RPC_PORT = 51472 - - -class Bitg(Coin): - - NAME = "BitcoinGreen" - SHORTNAME = "BITG" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - P2PKH_VERBYTE = bytes.fromhex("26") - P2SH_VERBYTES = [bytes.fromhex("06")] - WIF_BYTE = bytes.fromhex("2e") - GENESIS_HASH = ( - '000008467c3a9c587533dea06ad9380cded3ed32f9742a6c0c1aebc21bf2bc9b') - DAEMON = daemon.DashDaemon - TX_COUNT = 1000 - TX_COUNT_HEIGHT = 10000 - TX_PER_BLOCK = 1 - RPC_PORT = 9332 - REORG_LIMIT = 1000 - SESSIONCLS = DashElectrumX - DAEMON = daemon.DashDaemon - - @classmethod - def header_hash(cls, header): - """Given a header return the hash.""" - import quark_hash - return quark_hash.getPoWHash(header) - - -class tBitg(Bitg): - SHORTNAME = "tBITG" - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("043587cf") - XPRV_VERBYTES = bytes.fromhex("04358394") - P2PKH_VERBYTE = bytes.fromhex("62") - P2SH_VERBYTES = [bytes.fromhex("0c")] - WIF_BYTE = bytes.fromhex("6c") - GENESIS_HASH = ( - '000008467c3a9c587533dea06ad9380cded3ed32f9742a6c0c1aebc21bf2bc9b') - RPC_PORT = 19332 - - -class CivX(Coin): - NAME = "CivX" - SHORTNAME = "CIVX" - NET = "mainnet" - XPUB_VERBYTES = bytes.fromhex("0488b21e") - XPRV_VERBYTES = bytes.fromhex("0488ade4") - GENESIS_HASH = ('00000036090a68c523471da7a4f0f958' - 'c1b4403fef74a003be7f71877699cab7') - P2PKH_VERBYTE = bytes.fromhex("1C") - P2SH_VERBYTE = [bytes.fromhex("57")] - WIF_BYTE = bytes.fromhex("9C") - RPC_PORT = 4561 - TX_COUNT = 1000 - TX_COUNT_HEIGHT = 10000 - TX_PER_BLOCK = 4 - DAEMON = daemon.PreLegacyRPCDaemon - DESERIALIZER = lib_tx.DeserializerTxTime - - @classmethod - def header_hash(cls, header): - version, = util.unpack_le_uint32_from(header) - - if version > 2: - return double_sha256(header) - else: - return hex_str_to_hash(CivX.GENESIS_HASH) - 
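As context for the Pivx class above: Pivx switches from 80-byte to 112-byte headers at the V4 fork, and its static_header_offset maps a block height to a byte offset in the flat headers file. A small self-contained sketch of that arithmetic, with the constants copied from the class and purely illustrative assertions:

    BASIC_HEADER_SIZE = 80        # pre-V4 header size, in bytes
    HDR_V4_SIZE = 112             # V4 header size, in bytes
    HDR_V4_HEIGHT = 863787        # first height stored as a V4 header
    HDR_V4_START_OFFSET = HDR_V4_HEIGHT * BASIC_HEADER_SIZE

    def static_header_offset(height: int) -> int:
        # Heights below the fork are fixed 80-byte records; from the fork
        # onward, offsets continue from the end of the pre-fork region in
        # 112-byte steps.
        if height >= HDR_V4_HEIGHT:
            return HDR_V4_START_OFFSET + (height - HDR_V4_HEIGHT) * HDR_V4_SIZE
        return height * BASIC_HEADER_SIZE

    # The first V4 header starts exactly where the pre-fork region ends,
    # and consecutive V4 headers are 112 bytes apart.
    assert static_header_offset(HDR_V4_HEIGHT) == HDR_V4_HEIGHT * 80
    assert static_header_offset(HDR_V4_HEIGHT + 1) - static_header_offset(HDR_V4_HEIGHT) == 112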
- -class CivXTestnet(CivX): - SHORTNAME = "tCIVX" - NET = "testnet" - XPUB_VERBYTES = bytes.fromhex("043587cf") - XPRV_VERBYTES = bytes.fromhex("04358394") - GENESIS_HASH = ('0000059bb2c2048493efcb0f1a034972' - 'b3ce4089d54c93b69aaab212fb369887') - P2PKH_VERBYTE = bytes.fromhex("4B") - P2SH_VERBYTE = [bytes.fromhex("CE")] - WIF_BYTE = bytes.fromhex("CB") - RPC_PORT = 14561 - - @classmethod - def header_hash(cls, header): - version, = util.unpack_le_uint32_from(header) - - if version > 2: - return double_sha256(header) - else: - return hex_str_to_hash(CivXTestnet.GENESIS_HASH) diff --git a/torba/torba/server/daemon.py b/torba/torba/server/daemon.py deleted file mode 100644 index 60dc574cd..000000000 --- a/torba/torba/server/daemon.py +++ /dev/null @@ -1,467 +0,0 @@ -# Copyright (c) 2016-2017, Neil Booth -# -# All rights reserved. -# -# See the file "LICENCE" for information about the copyright -# and warranty status of this software. - -"""Class for handling asynchronous connections to a blockchain -daemon.""" - -import asyncio -import itertools -import json -import time -from calendar import timegm -from struct import pack -from time import strptime - -import aiohttp - -from torba.server.util import hex_to_bytes, class_logger,\ - unpack_le_uint16_from, pack_varint -from torba.server.hash import hex_str_to_hash, hash_to_hex_str -from torba.server.tx import DeserializerDecred -from torba.rpc import JSONRPC - - -class DaemonError(Exception): - """Raised when the daemon returns an error in its results.""" - - -class WarmingUpError(Exception): - """Internal - when the daemon is warming up.""" - - -class WorkQueueFullError(Exception): - """Internal - when the daemon's work queue is full.""" - - -class Daemon: - """Handles connections to a daemon at the given URL.""" - - WARMING_UP = -28 - id_counter = itertools.count() - - def __init__(self, coin, url, max_workqueue=10, init_retry=0.25, - max_retry=4.0): - self.coin = coin - self.logger = class_logger(__name__, self.__class__.__name__) - self.set_url(url) - # Limit concurrent RPC calls to this number. - # See DEFAULT_HTTP_WORKQUEUE in bitcoind, which is typically 16 - self.workqueue_semaphore = asyncio.Semaphore(value=max_workqueue) - self.init_retry = init_retry - self.max_retry = max_retry - self._height = None - self.available_rpcs = {} - self.connector = aiohttp.TCPConnector() - - async def close(self): - if self.connector: - await self.connector.close() - self.connector = None - - def set_url(self, url): - """Set the URLs to the given list, and switch to the first one.""" - urls = url.split(',') - urls = [self.coin.sanitize_url(url) for url in urls] - for n, url in enumerate(urls): - status = '' if n else ' (current)' - logged_url = self.logged_url(url) - self.logger.info(f'daemon #{n + 1} at {logged_url}{status}') - self.url_index = 0 - self.urls = urls - - def current_url(self): - """Returns the current daemon URL.""" - return self.urls[self.url_index] - - def logged_url(self, url=None): - """The host and port part, for logging.""" - url = url or self.current_url() - return url[url.rindex('@') + 1:] - - def failover(self): - """Call to fail-over to the next daemon URL. - - Returns False if there is only one, otherwise True.
- """ - if len(self.urls) > 1: - self.url_index = (self.url_index + 1) % len(self.urls) - self.logger.info(f'failing over to {self.logged_url()}') - return True - return False - - def client_session(self): - """An aiohttp client session.""" - return aiohttp.ClientSession(connector=self.connector, connector_owner=False) - - async def _send_data(self, data): - if not self.connector: - raise asyncio.CancelledError('Tried to send request during shutdown.') - async with self.workqueue_semaphore: - async with self.client_session() as session: - async with session.post(self.current_url(), data=data) as resp: - kind = resp.headers.get('Content-Type', None) - if kind == 'application/json': - return await resp.json() - # bitcoind's HTTP protocol "handling" is a bad joke - text = await resp.text() - if 'Work queue depth exceeded' in text: - raise WorkQueueFullError - text = text.strip() or resp.reason - self.logger.error(text) - raise DaemonError(text) - - async def _send(self, payload, processor): - """Send a payload to be converted to JSON. - - Handles temporary connection issues. Daemon response errors - are raise through DaemonError. - """ - def log_error(error): - nonlocal last_error_log, retry - now = time.time() - if now - last_error_log > 60: - last_error_log = now - self.logger.error(f'{error} Retrying occasionally...') - if retry == self.max_retry and self.failover(): - retry = 0 - - on_good_message = None - last_error_log = 0 - data = json.dumps(payload) - retry = self.init_retry - while True: - try: - result = await self._send_data(data) - result = processor(result) - if on_good_message: - self.logger.info(on_good_message) - return result - except asyncio.TimeoutError: - log_error('timeout error.') - except aiohttp.ServerDisconnectedError: - log_error('disconnected.') - on_good_message = 'connection restored' - except aiohttp.ClientConnectionError: - log_error('connection problem - is your daemon running?') - on_good_message = 'connection restored' - except aiohttp.ClientError as e: - log_error(f'daemon error: {e}') - on_good_message = 'running normally' - except WarmingUpError: - log_error('starting up checking blocks.') - on_good_message = 'running normally' - except WorkQueueFullError: - log_error('work queue full.') - on_good_message = 'running normally' - - await asyncio.sleep(retry) - retry = max(min(self.max_retry, retry * 2), self.init_retry) - - async def _send_single(self, method, params=None): - """Send a single request to the daemon.""" - def processor(result): - err = result['error'] - if not err: - return result['result'] - if err.get('code') == self.WARMING_UP: - raise WarmingUpError - raise DaemonError(err) - - payload = {'method': method, 'id': next(self.id_counter)} - if params: - payload['params'] = params - return await self._send(payload, processor) - - async def _send_vector(self, method, params_iterable, replace_errs=False): - """Send several requests of the same method. - - The result will be an array of the same length as params_iterable. 
- If replace_errs is true, any item with an error is returned as None, - otherwise an exception is raised.""" - def processor(result): - errs = [item['error'] for item in result if item['error']] - if any(err.get('code') == self.WARMING_UP for err in errs): - raise WarmingUpError - if not errs or replace_errs: - return [item['result'] for item in result] - raise DaemonError(errs) - - payload = [{'method': method, 'params': p, 'id': next(self.id_counter)} - for p in params_iterable] - if payload: - return await self._send(payload, processor) - return [] - - async def _is_rpc_available(self, method): - """Return whether the given RPC method is available in the daemon. - - Results are cached and the daemon will generally not be queried with - the same method more than once.""" - available = self.available_rpcs.get(method) - if available is None: - available = True - try: - await self._send_single(method) - except DaemonError as e: - err = e.args[0] - error_code = err.get("code") - available = error_code != JSONRPC.METHOD_NOT_FOUND - self.available_rpcs[method] = available - return available - - async def block_hex_hashes(self, first, count): - """Return the hex hashes of count blocks starting at height first.""" - params_iterable = ((h, ) for h in range(first, first + count)) - return await self._send_vector('getblockhash', params_iterable) - - async def deserialised_block(self, hex_hash): - """Return the deserialised block with the given hex hash.""" - return await self._send_single('getblock', (hex_hash, True)) - - async def raw_blocks(self, hex_hashes): - """Return the raw binary blocks with the given hex hashes.""" - params_iterable = ((h, False) for h in hex_hashes) - blocks = await self._send_vector('getblock', params_iterable) - # Convert hex string to bytes - return [hex_to_bytes(block) for block in blocks] - - async def mempool_hashes(self): - """Return the daemon's mempool hashes.""" - return await self._send_single('getrawmempool') - - async def estimatefee(self, block_count): - """Return the fee estimate for the block count. Units are whole - currency units per KB, e.g. 0.00000995, or -1 if no estimate - is available. - """ - args = (block_count, ) - if await self._is_rpc_available('estimatesmartfee'): - estimate = await self._send_single('estimatesmartfee', args) - return estimate.get('feerate', -1) - return await self._send_single('estimatefee', args) - - async def getnetworkinfo(self): - """Return the result of the 'getnetworkinfo' RPC call.""" - return await self._send_single('getnetworkinfo') - - async def relayfee(self): - """The minimum fee a low-priority tx must pay in order to be accepted - to the daemon's memory pool.""" - network_info = await self.getnetworkinfo() - return network_info['relayfee'] - - async def getrawtransaction(self, hex_hash, verbose=False): - """Return the serialized raw transaction with the given hash.""" - # Cast to int because some coin daemons are old and require it - return await self._send_single('getrawtransaction', - (hex_hash, int(verbose))) - - async def getrawtransactions(self, hex_hashes, replace_errs=True): - """Return the serialized raw transactions with the given hashes.
- - Replaces errors with None by default.""" - params_iterable = ((hex_hash, 0) for hex_hash in hex_hashes) - txs = await self._send_vector('getrawtransaction', params_iterable, - replace_errs=replace_errs) - # Convert hex strings to bytes - return [hex_to_bytes(tx) if tx else None for tx in txs] - - async def broadcast_transaction(self, raw_tx): - """Broadcast a transaction to the network.""" - return await self._send_single('sendrawtransaction', (raw_tx, )) - - async def height(self): - """Query the daemon for its current height.""" - self._height = await self._send_single('getblockcount') - return self._height - - def cached_height(self): - """Return the cached daemon height. - - If the daemon has not been queried yet this returns None.""" - return self._height - - -class DashDaemon(Daemon): - - async def masternode_broadcast(self, params): - """Broadcast a masternode announcement to the network.""" - return await self._send_single('masternodebroadcast', params) - - async def masternode_list(self, params): - """Return the masternode status.""" - return await self._send_single('masternodelist', params) - - -class FakeEstimateFeeDaemon(Daemon): - """Daemon that simulates estimatefee and relayfee RPC calls. A coin that - wants to use this daemon must define ESTIMATE_FEE & RELAY_FEE.""" - - async def estimatefee(self, block_count): - """Return the fee estimate for the given parameters.""" - return self.coin.ESTIMATE_FEE - - async def relayfee(self): - """The minimum fee a low-priority tx must pay in order to be accepted - to the daemon's memory pool.""" - return self.coin.RELAY_FEE - - -class LegacyRPCDaemon(Daemon): - """Handles connections to a daemon at the given URL. - - This class is useful for daemons that don't have the new 'getblock' - RPC call that returns the block in hex; the workaround is to manually - recreate the block bytes.
The recreated block bytes may not be exactly - as in the underlying blockchain, but they are good enough for our indexing - purposes.""" - - async def raw_blocks(self, hex_hashes): - """Return the raw binary blocks with the given hex hashes.""" - params_iterable = ((h, ) for h in hex_hashes) - block_info = await self._send_vector('getblock', params_iterable) - - blocks = [] - for i in block_info: - raw_block = await self.make_raw_block(i) - blocks.append(raw_block) - - # make_raw_block already returns raw bytes - return blocks - - async def make_raw_header(self, b): - pbh = b.get('previousblockhash') - if pbh is None: - pbh = '0' * 64 - return b''.join([ - pack('<L', b.get('version')), - hex_str_to_hash(pbh), - hex_str_to_hash(b.get('merkleroot')), - pack('<L', self.timestamp_safe(b.get('time'))), - pack('<L', int(b.get('bits'), 16)), - pack('<L', int(b.get('nonce'))) - ]) - - async def make_raw_block(self, b): - """Construct a raw block""" - - header = await self.make_raw_header(b) - - transactions = [] - if b.get('height') > 0: - transactions = await self.getrawtransactions(b.get('tx'), False) - - raw_block = header - num_txs = len(transactions) - if num_txs > 0: - raw_block += pack_varint(num_txs) - raw_block += b''.join(transactions) - else: - raw_block += b'\x00' - - return raw_block - - def timestamp_safe(self, t): - if isinstance(t, int): - return t - return timegm(strptime(t, "%Y-%m-%d %H:%M:%S %Z")) - - -class DecredDaemon(Daemon): - async def raw_blocks(self, hex_hashes): - """Return the raw binary blocks with the given hex hashes.""" - - params_iterable = ((h, False) for h in hex_hashes) - blocks = await self._send_vector('getblock', params_iterable) - - raw_blocks = [] - valid_tx_tree = {} - for block in blocks: - # Convert to bytes from hex - raw_block = hex_to_bytes(block) - raw_blocks.append(raw_block) - # Check if previous block is valid - prev = self.prev_hex_hash(raw_block) - votebits = unpack_le_uint16_from(raw_block[100:102])[0] - valid_tx_tree[prev] = self.is_valid_tx_tree(votebits) - - processed_raw_blocks = [] - for hash, raw_block in zip(hex_hashes, raw_blocks): - if hash in valid_tx_tree: - is_valid = valid_tx_tree[hash] - else: - # Do something complicated to figure out if this block is valid - header = await self._send_single('getblockheader', (hash, )) - if 'nextblockhash' not in header: - raise DaemonError(f'Could not find next block for {hash}') - next_hash = header['nextblockhash'] - next_header = await self._send_single('getblockheader', - (next_hash, )) - is_valid = self.is_valid_tx_tree(next_header['votebits']) - - if is_valid: - processed_raw_blocks.append(raw_block) - else: - # If this block is invalid remove the normal transactions - self.logger.info(f'block {hash} is invalidated') - processed_raw_blocks.append(self.strip_tx_tree(raw_block)) - - return processed_raw_blocks - - @staticmethod - def prev_hex_hash(raw_block): - return hash_to_hex_str(raw_block[4:36]) - - @staticmethod - def is_valid_tx_tree(votebits): - # Check if previous block was invalidated. - return bool(votebits & (1 << 0) != 0) - - def strip_tx_tree(self, raw_block): - c = self.coin - assert issubclass(c.DESERIALIZER, DeserializerDecred) - d = c.DESERIALIZER(raw_block, start=c.BASIC_HEADER_SIZE) - d.read_tx_tree() # Skip normal transactions - # Create a fake block without any normal transactions - return raw_block[:c.BASIC_HEADER_SIZE] + b'\x00' + raw_block[d.cursor:] - - async def height(self): - height = await super().height() - if height > 0: - # Lie about the daemon height as the current tip can be invalidated - height -= 1 - self._height = height - return height - - async def mempool_hashes(self): - mempool = await super().mempool_hashes() - # Add current tip transactions to the 'fake' mempool.
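`make_raw_block` above reassembles the wire format as header, then a varint transaction count, then the concatenated raw transactions. The `pack_varint` helper it calls is defined elsewhere in the server utilities; for reference, a sketch of the standard Bitcoin varint encoding it is expected to produce:

```python
from struct import pack

def pack_varint(n: int) -> bytes:
    """Bitcoin variable-length integer: 1, 3, 5 or 9 bytes."""
    if n < 253:
        return pack('<B', n)
    if n <= 0xffff:
        return b'\xfd' + pack('<H', n)
    if n <= 0xffffffff:
        return b'\xfe' + pack('<I', n)
    return b'\xff' + pack('<Q', n)

# raw_block = raw_header + pack_varint(len(raw_txs)) + b''.join(raw_txs)
```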
- real_height = await self._send_single('getblockcount') - tip_hash = await self._send_single('getblockhash', (real_height,)) - tip = await self.deserialised_block(tip_hash) - # Add normal transactions except coinbase - mempool += tip['tx'][1:] - # Add stake transactions if applicable - mempool += tip.get('stx', []) - return mempool - - def client_session(self): - # FIXME: allow self-signed certificates - connector = aiohttp.TCPConnector(verify_ssl=False) - return aiohttp.ClientSession(connector=connector) - - -class PreLegacyRPCDaemon(LegacyRPCDaemon): - """Handles connections to a daemon at the given URL. - - This class is useful for daemons that don't have the new 'getblock' - RPC call that returns the block in hex, and need the False parameter - for getblock.""" - - async def deserialised_block(self, hex_hash): - """Return the deserialised block with the given hex hash.""" - return await self._send_single('getblock', (hex_hash, False)) diff --git a/torba/torba/server/session.py b/torba/torba/server/session.py deleted file mode 100644 index 57ce6b4b9..000000000 --- a/torba/torba/server/session.py +++ /dev/null @@ -1,1467 +0,0 @@ -# Copyright (c) 2016-2018, Neil Booth -# -# All rights reserved. -# -# See the file "LICENCE" for information about the copyright -# and warranty status of this software. - -"""Classes for local RPC server and remote client TCP/SSL servers.""" -import base64 -import collections -import asyncio -import codecs -import datetime -import itertools -import json -import os -import zlib - -import pylru -import ssl -import time -import typing -from asyncio import Event, sleep -from collections import defaultdict -from functools import partial - -import torba -from torba.rpc import ( - RPCSession, JSONRPCAutoDetect, JSONRPCConnection, - handler_invocation, RPCError, Request -) -from torba.server import text -from torba.server import util -from torba.server.hash import (sha256, hash_to_hex_str, hex_str_to_hash, - HASHX_LEN, Base58Error) -from torba.server.daemon import DaemonError -from torba.server.peers import PeerManager -if typing.TYPE_CHECKING: - from torba.server.env import Env - from torba.server.db import DB - from torba.server.block_processor import BlockProcessor - from torba.server.mempool import MemPool - from torba.server.daemon import Daemon - -BAD_REQUEST = 1 -DAEMON_ERROR = 2 - - -def scripthash_to_hashX(scripthash: str) -> bytes: - try: - bin_hash = hex_str_to_hash(scripthash) - if len(bin_hash) == 32: - return bin_hash[:HASHX_LEN] - except Exception: - pass - raise RPCError(BAD_REQUEST, f'{scripthash} is not a valid script hash') - - -def non_negative_integer(value) -> int: - """Return the param value if it is, or can be converted to, a non-negative - integer; otherwise raise an RPCError.""" - try: - value = int(value) - if value >= 0: - return value - except ValueError: - pass - raise RPCError(BAD_REQUEST, - f'{value} should be a non-negative integer') - - -def assert_boolean(value) -> bool: - """Return the param value if it is a boolean, otherwise raise an RPCError.""" - if value in (False, True): - return value - raise RPCError(BAD_REQUEST, f'{value} should be a boolean value') - - -def assert_tx_hash(value: str) -> None: - """Raise an RPCError if the value is not a valid transaction - hash.""" - try: - if len(util.hex_to_bytes(value)) == 32: - return - except Exception: - pass - raise RPCError(BAD_REQUEST, f'{value} should be a transaction hash') - - -class Semaphores: - """For aiorpcX's semaphore handling.""" - - def __init__(self, semaphores): - self.semaphores =
semaphores - self.acquired = [] - - async def __aenter__(self): - for semaphore in self.semaphores: - await semaphore.acquire() - self.acquired.append(semaphore) - - async def __aexit__(self, exc_type, exc_value, traceback): - for semaphore in self.acquired: - semaphore.release() - - -class SessionGroup: - - def __init__(self, gid: int): - self.gid = gid - # Concurrency per group - self.semaphore = asyncio.Semaphore(20) - - -class SessionManager: - """Holds global state about all sessions.""" - - def __init__(self, env: 'Env', db: 'DB', bp: 'BlockProcessor', daemon: 'Daemon', mempool: 'MemPool', - shutdown_event: asyncio.Event): - env.max_send = max(350000, env.max_send) - self.env = env - self.db = db - self.bp = bp - self.daemon = daemon - self.mempool = mempool - self.peer_mgr = PeerManager(env, db) - self.shutdown_event = shutdown_event - self.logger = util.class_logger(__name__, self.__class__.__name__) - self.servers: typing.Dict[str, asyncio.AbstractServer] = {} - self.sessions: typing.Set['SessionBase'] = set() - self.cur_group = SessionGroup(0) - self.txs_sent = 0 - self.start_time = time.time() - self.history_cache = pylru.lrucache(256) - self.notified_height: typing.Optional[int] = None - # Cache some idea of room to avoid recounting on each subscription - self.subs_room = 0 - # Masternode stuff only for such coins - if issubclass(env.coin.SESSIONCLS, DashElectrumX): - self.mn_cache_height = 0 - self.mn_cache = [] # type: ignore - - self.session_event = Event() - - # Set up the RPC request handlers - cmds = ('add_peer daemon_url disconnect getinfo groups log peers ' - 'query reorg sessions stop'.split()) - LocalRPC.request_handlers.update( - {cmd: getattr(self, 'rpc_' + cmd) for cmd in cmds} - ) - - async def _start_server(self, kind, *args, **kw_args): - loop = asyncio.get_event_loop() - if kind == 'RPC': - protocol_class = LocalRPC - else: - protocol_class = self.env.coin.SESSIONCLS - protocol_factory = partial(protocol_class, self, self.db, - self.mempool, self.peer_mgr, kind) - - host, port = args[:2] - try: - self.servers[kind] = await loop.create_server(protocol_factory, *args, **kw_args) - except OSError as e: # don't suppress CancelledError - self.logger.error(f'{kind} server failed to listen on {host}:' - f'{port:d} :{e!r}') - else: - self.logger.info(f'{kind} server listening on {host}:{port:d}') - - async def _start_external_servers(self): - """Start listening on TCP and SSL ports, but only if the respective - port was given in the environment. 
- """ - env = self.env - host = env.cs_host(for_rpc=False) - if env.tcp_port is not None: - await self._start_server('TCP', host, env.tcp_port) - if env.ssl_port is not None: - sslc = ssl.SSLContext(ssl.PROTOCOL_TLS) - sslc.load_cert_chain(env.ssl_certfile, keyfile=env.ssl_keyfile) - await self._start_server('SSL', host, env.ssl_port, ssl=sslc) - - async def _close_servers(self, kinds): - """Close the servers of the given kinds (TCP etc.).""" - if kinds: - self.logger.info('closing down {} listening servers' - .format(', '.join(kinds))) - for kind in kinds: - server = self.servers.pop(kind, None) - if server: - server.close() - await server.wait_closed() - - async def _manage_servers(self): - paused = False - max_sessions = self.env.max_sessions - low_watermark = max_sessions * 19 // 20 - while True: - await self.session_event.wait() - self.session_event.clear() - if not paused and len(self.sessions) >= max_sessions: - self.logger.info(f'maximum sessions {max_sessions:,d} ' - f'reached, stopping new connections until ' - f'count drops to {low_watermark:,d}') - await self._close_servers(['TCP', 'SSL']) - paused = True - # Start listening for incoming connections if paused and - # session count has fallen - if paused and len(self.sessions) <= low_watermark: - self.logger.info('resuming listening for incoming connections') - await self._start_external_servers() - paused = False - - async def _log_sessions(self): - """Periodically log sessions.""" - log_interval = self.env.log_sessions - if log_interval: - while True: - await sleep(log_interval) - data = self._session_data(for_log=True) - for line in text.sessions_lines(data): - self.logger.info(line) - self.logger.info(json.dumps(self._get_info())) - - def _group_map(self): - group_map = defaultdict(list) - for session in self.sessions: - group_map[session.group].append(session) - return group_map - - def _sub_count(self) -> int: - return sum(s.sub_count() for s in self.sessions) - - def _lookup_session(self, session_id): - try: - session_id = int(session_id) - except Exception: - pass - else: - for session in self.sessions: - if session.session_id == session_id: - return session - return None - - async def _for_each_session(self, session_ids, operation): - if not isinstance(session_ids, list): - raise RPCError(BAD_REQUEST, 'expected a list of session IDs') - - result = [] - for session_id in session_ids: - session = self._lookup_session(session_id) - if session: - result.append(await operation(session)) - else: - result.append(f'unknown session: {session_id}') - return result - - async def _clear_stale_sessions(self): - """Cut off sessions that haven't done anything for 10 minutes.""" - session_timeout = self.env.session_timeout - while True: - await sleep(session_timeout // 10) - stale_cutoff = time.perf_counter() - session_timeout - stale_sessions = [session for session in self.sessions - if session.last_recv < stale_cutoff] - if stale_sessions: - text = ', '.join(str(session.session_id) - for session in stale_sessions) - self.logger.info(f'closing stale connections {text}') - # Give the sockets some time to close gracefully - if stale_sessions: - await asyncio.wait([ - session.close(force_after=session_timeout // 10) for session in stale_sessions - ]) - - # Consolidate small groups - group_map = self._group_map() - groups = [group for group, sessions in group_map.items() - if len(sessions) <= 5] # fixme: apply session cost here - if len(groups) > 1: - new_group = groups[-1] - for group in groups: - for session in group_map[group]: - 
session.group = new_group - - def _get_info(self): - """A summary of server state.""" - group_map = self._group_map() - method_counts = collections.defaultdict(int) - error_count = 0 - logged = 0 - paused = 0 - pending_requests = 0 - closing = 0 - - for s in self.sessions: - error_count += s.errors - if s.log_me: - logged += 1 - if not s._can_send.is_set(): - paused += 1 - pending_requests += s.count_pending_items() - if s.is_closing(): - closing += 1 - for request, _ in s.connection._requests.values(): - method_counts[request.method] += 1 - return { - 'closing': closing, - 'daemon': self.daemon.logged_url(), - 'daemon_height': self.daemon.cached_height(), - 'db_height': self.db.db_height, - 'errors': error_count, - 'groups': len(group_map), - 'logged': logged, - 'paused': paused, - 'pid': os.getpid(), - 'peers': self.peer_mgr.info(), - 'requests': pending_requests, - 'method_counts': method_counts, - 'sessions': self.session_count(), - 'subs': self._sub_count(), - 'txs_sent': self.txs_sent, - 'uptime': util.formatted_time(time.time() - self.start_time), - 'version': torba.__version__, - } - - def _session_data(self, for_log): - """Returned to the RPC 'sessions' call.""" - now = time.time() - sessions = sorted(self.sessions, key=lambda s: s.start_time) - return [(session.session_id, - session.flags(), - session.peer_address_str(for_log=for_log), - session.client, - session.protocol_version_string(), - session.count_pending_items(), - session.txs_sent, - session.sub_count(), - session.recv_count, session.recv_size, - session.send_count, session.send_size, - now - session.start_time) - for session in sessions] - - def _group_data(self): - """Returned to the RPC 'groups' call.""" - result = [] - group_map = self._group_map() - for group, sessions in group_map.items(): - result.append([group.gid, - len(sessions), - sum(s.bw_charge for s in sessions), - sum(s.count_pending_items() for s in sessions), - sum(s.txs_sent for s in sessions), - sum(s.sub_count() for s in sessions), - sum(s.recv_count for s in sessions), - sum(s.recv_size for s in sessions), - sum(s.send_count for s in sessions), - sum(s.send_size for s in sessions), - ]) - return result - - async def _electrum_and_raw_headers(self, height): - raw_header = await self.raw_header(height) - electrum_header = self.env.coin.electrum_header(raw_header, height) - return electrum_header, raw_header - - async def _refresh_hsub_results(self, height): - """Refresh the cached header subscription responses to be for height, - and record that as notified_height. - """ - # Paranoia: a reorg could race and leave db_height lower - height = min(height, self.db.db_height) - electrum, raw = await self._electrum_and_raw_headers(height) - self.hsub_results = (electrum, {'hex': raw.hex(), 'height': height}) - self.notified_height = height - - # --- LocalRPC command handlers - - async def rpc_add_peer(self, real_name): - """Add a peer. - - real_name: "bch.electrumx.cash t50001 s50002" for example - """ - await self.peer_mgr.add_localRPC_peer(real_name) - return f"peer '{real_name}' added" - - async def rpc_disconnect(self, session_ids): - """Disconnect sessions. - - session_ids: array of session IDs - """ - async def close(session): - """Close the session's transport.""" - await session.close(force_after=2) - return f'disconnected {session.session_id}' - - return await self._for_each_session(session_ids, close) - - async def rpc_log(self, session_ids): - """Toggle logging of sessions. 
- - session_ids: array of session IDs - """ - async def toggle_logging(session): - """Toggle logging of the session.""" - session.toggle_logging() - return f'log {session.session_id}: {session.log_me}' - - return await self._for_each_session(session_ids, toggle_logging) - - async def rpc_daemon_url(self, daemon_url): - """Replace the daemon URL.""" - daemon_url = daemon_url or self.env.daemon_url - try: - self.daemon.set_url(daemon_url) - except Exception as e: - raise RPCError(BAD_REQUEST, f'an error occurred: {e!r}') - return f'now using daemon at {self.daemon.logged_url()}' - - async def rpc_stop(self): - """Shut down the server cleanly.""" - self.shutdown_event.set() - return 'stopping' - - async def rpc_getinfo(self): - """Return summary information about the server process.""" - return self._get_info() - - async def rpc_groups(self): - """Return statistics about the session groups.""" - return self._group_data() - - async def rpc_peers(self): - """Return a list of data about server peers.""" - return self.peer_mgr.rpc_data() - - async def rpc_query(self, items, limit): - """Return history and UTXO information for the given addresses or scripts.""" - coin = self.env.coin - db = self.db - lines = [] - - def arg_to_hashX(arg): - try: - script = bytes.fromhex(arg) - lines.append(f'Script: {arg}') - return coin.hashX_from_script(script) - except ValueError: - pass - - try: - hashX = coin.address_to_hashX(arg) - except Base58Error as e: - lines.append(e.args[0]) - return None - lines.append(f'Address: {arg}') - return hashX - - for arg in items: - hashX = arg_to_hashX(arg) - if not hashX: - continue - n = None - history = await db.limited_history(hashX, limit=limit) - for n, (tx_hash, height) in enumerate(history): - lines.append(f'History #{n:,d}: height {height:,d} ' - f'tx_hash {hash_to_hex_str(tx_hash)}') - if n is None: - lines.append('No history found') - n = None - utxos = await db.all_utxos(hashX) - for n, utxo in enumerate(utxos, start=1): - lines.append(f'UTXO #{n:,d}: tx_hash ' - f'{hash_to_hex_str(utxo.tx_hash)} ' - f'tx_pos {utxo.tx_pos:,d} height ' - f'{utxo.height:,d} value {utxo.value:,d}') - if n == limit: - break - if n is None: - lines.append('No UTXOs found') - - balance = sum(utxo.value for utxo in utxos) - lines.append(f'Balance: {coin.decimal_value(balance):,f} ' - f'{coin.SHORTNAME}') - - return lines - - async def rpc_sessions(self): - """Return statistics about connected sessions.""" - return self._session_data(for_log=False) - - async def rpc_reorg(self, count): - """Force a reorg of the given number of blocks. - - count: number of blocks to reorg - """ - count = non_negative_integer(count) - if not self.bp.force_chain_reorg(count): - raise RPCError(BAD_REQUEST, 'still catching up with daemon') - return f'scheduled a reorg of {count:,d} blocks' - - # --- External Interface - - async def serve(self, notifications, server_listening_event): - """Start the RPC server if enabled.
When the event is triggered, - start TCP and SSL servers.""" - try: - if self.env.rpc_port is not None: - await self._start_server('RPC', self.env.cs_host(for_rpc=True), - self.env.rpc_port) - self.logger.info(f'max session count: {self.env.max_sessions:,d}') - self.logger.info(f'session timeout: ' - f'{self.env.session_timeout:,d} seconds') - self.logger.info(f'max response size {self.env.max_send:,d} bytes') - if self.env.drop_client is not None: - self.logger.info(f'drop clients matching: {self.env.drop_client.pattern}') - # Start notifications; initialize hsub_results - await notifications.start(self.db.db_height, self._notify_sessions) - await self.start_other() - await self._start_external_servers() - server_listening_event.set() - # Peer discovery should start after the external servers - # because we connect to ourself - await asyncio.wait([ - self.peer_mgr.discover_peers(), - self._clear_stale_sessions(), - self._log_sessions(), - self._manage_servers() - ]) - finally: - await self._close_servers(list(self.servers.keys())) - if self.sessions: - await asyncio.wait([ - session.close(force_after=1) for session in self.sessions - ]) - await self.stop_other() - - async def start_other(self): - pass - - async def stop_other(self): - pass - - def session_count(self) -> int: - """The number of connections that we've sent something to.""" - return len(self.sessions) - - async def daemon_request(self, method, *args): - """Catch a DaemonError and convert it to an RPCError.""" - try: - return await getattr(self.daemon, method)(*args) - except DaemonError as e: - raise RPCError(DAEMON_ERROR, f'daemon error: {e!r}') from None - - async def raw_header(self, height): - """Return the binary header at the given height.""" - try: - return await self.db.raw_header(height) - except IndexError: - raise RPCError(BAD_REQUEST, f'height {height:,d} ' - 'out of range') from None - - async def electrum_header(self, height): - """Return the deserialized header at the given height.""" - electrum_header, _ = await self._electrum_and_raw_headers(height) - return electrum_header - - async def broadcast_transaction(self, raw_tx): - hex_hash = await self.daemon.broadcast_transaction(raw_tx) - self.mempool.wakeup.set() - self.txs_sent += 1 - return hex_hash - - async def limited_history(self, hashX): - """A caching layer.""" - hc = self.history_cache - if hashX not in hc: - # History DoS limit. Each element of history is about 99 - # bytes when encoded as JSON. This limits resource usage - # on bloated history requests, and uses a smaller divisor - # so large requests are logged before refusing them. 
- limit = self.env.max_send // 97 - hc[hashX] = await self.db.limited_history(hashX, limit=limit) - return hc[hashX] - - async def _notify_sessions(self, height, touched): - """Notify sessions about height changes and touched addresses.""" - height_changed = height != self.notified_height - if height_changed: - await self._refresh_hsub_results(height) - # Invalidate our history cache for touched hashXs - hc = self.history_cache - for hashX in set(hc).intersection(touched): - del hc[hashX] - - if self.sessions: - await asyncio.wait([ - session.notify(touched, height_changed) for session in self.sessions - ]) - - def add_session(self, session): - self.sessions.add(session) - self.session_event.set() - gid = int(session.start_time - self.start_time) // 900 - if self.cur_group.gid != gid: - self.cur_group = SessionGroup(gid) - return self.cur_group - - def remove_session(self, session): - """Remove a session from our sessions list if there.""" - self.sessions.remove(session) - self.session_event.set() - - -class SessionBase(RPCSession): - """Base class of ElectrumX JSON sessions. - - Each session runs its tasks in asynchronous parallelism with other - sessions. - """ - - MAX_CHUNK_SIZE = 40960 - session_counter = itertools.count() - request_handlers: typing.Dict[str, typing.Callable] = {} - version = '0.5.7' - - def __init__(self, session_mgr, db, mempool, peer_mgr, kind): - connection = JSONRPCConnection(JSONRPCAutoDetect) - super().__init__(connection=connection) - self.logger = util.class_logger(__name__, self.__class__.__name__) - self.session_mgr = session_mgr - self.db = db - self.mempool = mempool - self.peer_mgr = peer_mgr - self.kind = kind # 'RPC', 'TCP' etc. - self.env = session_mgr.env - self.coin = self.env.coin - self.client = 'unknown' - self.anon_logs = self.env.anon_logs - self.txs_sent = 0 - self.log_me = False - self.daemon_request = self.session_mgr.daemon_request - # Hijack the connection so we can log messages - self._receive_message_orig = self.connection.receive_message - self.connection.receive_message = self.receive_message - - async def notify(self, touched, height_changed): - pass - - def peer_address_str(self, *, for_log=True): - """Returns the peer's IP address and port as a human-readable - string, respecting anon logs if the output is for a log.""" - if for_log and self.anon_logs: - return 'xx.xx.xx.xx:xx' - return super().peer_address_str() - - def receive_message(self, message): - if self.log_me: - self.logger.info(f'processing {message}') - return self._receive_message_orig(message) - - def toggle_logging(self): - self.log_me = not self.log_me - - def flags(self): - """Status flags.""" - status = self.kind[0] - if self.is_closing(): - status += 'C' - if self.log_me: - status += 'L' - status += str(self._concurrency.max_concurrent) - return status - - def connection_made(self, transport): - """Handle an incoming client connection.""" - super().connection_made(transport) - self.session_id = next(self.session_counter) - context = {'conn_id': f'{self.session_id}'} - self.logger = util.ConnectionLogger(self.logger, context) - self.group = self.session_mgr.add_session(self) - self.logger.info(f'{self.kind} {self.peer_address_str()}, ' - f'{self.session_mgr.session_count():,d} total') - - def connection_lost(self, exc): - """Handle client disconnection.""" - super().connection_lost(exc) - self.session_mgr.remove_session(self) - msg = '' - if not self._can_send.is_set(): - msg += ' whilst paused' - if self._concurrency.max_concurrent != self.max_concurrent: - msg += 
' whilst throttled' - if self.send_size >= 1024*1024: - msg += ('. Sent {:,d} bytes in {:,d} messages' - .format(self.send_size, self.send_count)) - if msg: - msg = 'disconnected' + msg - self.logger.info(msg) - - def count_pending_items(self): - return len(self.connection.pending_requests()) - - def semaphore(self): - return Semaphores([self._concurrency.semaphore, self.group.semaphore]) - - def sub_count(self): - return 0 - - async def handle_request(self, request): - """Handle an incoming request. ElectrumX doesn't receive - notifications from client sessions. - """ - if isinstance(request, Request): - handler = self.request_handlers.get(request.method) - else: - handler = None - coro = handler_invocation(handler, request)() - return await coro - - -class ElectrumX(SessionBase): - """A TCP server that handles incoming Electrum connections.""" - - PROTOCOL_MIN = (1, 1) - PROTOCOL_MAX = (1, 4) - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.subscribe_headers = False - self.subscribe_headers_raw = False - self.connection.max_response_size = self.env.max_send - self.hashX_subs = {} - self.sv_seen = False - self.mempool_statuses = {} - self.set_request_handlers(self.PROTOCOL_MIN) - - @classmethod - def protocol_min_max_strings(cls): - return [util.version_string(ver) - for ver in (cls.PROTOCOL_MIN, cls.PROTOCOL_MAX)] - - @classmethod - def server_features(cls, env): - """Return the server features dictionary.""" - min_str, max_str = cls.protocol_min_max_strings() - return { - 'hosts': env.hosts_dict(), - 'pruning': None, - 'server_version': cls.version, - 'protocol_min': min_str, - 'protocol_max': max_str, - 'genesis_hash': env.coin.GENESIS_HASH, - 'description': env.description, - 'payment_address': env.donation_address, - 'daily_fee': env.daily_fee, - 'hash_function': 'sha256', - } - - async def server_features_async(self): - return self.server_features(self.env) - - @classmethod - def server_version_args(cls): - """The arguments to a server.version RPC call to a peer.""" - return [cls.version, cls.protocol_min_max_strings()] - - def protocol_version_string(self): - return util.version_string(self.protocol_tuple) - - def sub_count(self): - return len(self.hashX_subs) - - async def notify(self, touched, height_changed): - """Notify the client about changes to touched addresses (from mempool - updates or new blocks) and height. - """ - if height_changed and self.subscribe_headers: - args = (await self.subscribe_headers_result(), ) - await self.send_notification('blockchain.headers.subscribe', args) - - touched = touched.intersection(self.hashX_subs) - if touched or (height_changed and self.mempool_statuses): - changed = {} - - for hashX in touched: - alias = self.hashX_subs[hashX] - status = await self.address_status(hashX) - changed[alias] = status - - # Check mempool hashXs - the status is a function of the - # confirmed state of other transactions. Note: we cannot - # iterate over mempool_statuses as it changes size. 
- for hashX in tuple(self.mempool_statuses): - # Items can be evicted whilst await-ing status; False - # ensures such hashXs are notified - old_status = self.mempool_statuses.get(hashX, False) - status = await self.address_status(hashX) - if status != old_status: - alias = self.hashX_subs[hashX] - changed[alias] = status - - for alias, status in changed.items(): - if len(alias) == 64: - method = 'blockchain.scripthash.subscribe' - else: - method = 'blockchain.address.subscribe' - await self.send_notification(method, (alias, status)) - - if changed: - es = '' if len(changed) == 1 else 'es' - self.logger.info(f'notified of {len(changed):,d} address{es}') - - async def subscribe_headers_result(self): - """The result of a header subscription or notification.""" - return self.session_mgr.hsub_results[self.subscribe_headers_raw] - - async def _headers_subscribe(self, raw): - """Subscribe to get headers of new blocks.""" - self.subscribe_headers_raw = assert_boolean(raw) - self.subscribe_headers = True - return await self.subscribe_headers_result() - - async def headers_subscribe(self): - """Subscribe to get raw headers of new blocks.""" - return await self._headers_subscribe(True) - - async def headers_subscribe_True(self, raw=True): - """Subscribe to get headers of new blocks.""" - return await self._headers_subscribe(raw) - - async def headers_subscribe_False(self, raw=False): - """Subscribe to get headers of new blocks.""" - return await self._headers_subscribe(raw) - - async def add_peer(self, features): - """Add a peer (but only if the peer resolves to the source).""" - return await self.peer_mgr.on_add_peer(features, self.peer_address()) - - async def peers_subscribe(self): - """Return the server peers as a list of (ip, host, details) tuples.""" - return self.peer_mgr.on_peers_subscribe(self.is_tor()) - - async def address_status(self, hashX): - """Returns an address status. - - Status is a hex string, but must be None if there is no history. 
- """ - # Note history is ordered and mempool unordered in electrum-server - # For mempool, height is -1 if it has unconfirmed inputs, otherwise 0 - db_history = await self.session_mgr.limited_history(hashX) - mempool = await self.mempool.transaction_summaries(hashX) - - status = ''.join(f'{hash_to_hex_str(tx_hash)}:' - f'{height:d}:' - for tx_hash, height in db_history) - status += ''.join(f'{hash_to_hex_str(tx.hash)}:' - f'{-tx.has_unconfirmed_inputs:d}:' - for tx in mempool) - if status: - status = sha256(status.encode()).hex() - else: - status = None - - if mempool: - self.mempool_statuses[hashX] = status - else: - self.mempool_statuses.pop(hashX, None) - - return status - - async def hashX_listunspent(self, hashX): - """Return the list of UTXOs of a script hash, including mempool - effects.""" - utxos = await self.db.all_utxos(hashX) - utxos = sorted(utxos) - utxos.extend(await self.mempool.unordered_UTXOs(hashX)) - spends = await self.mempool.potential_spends(hashX) - - return [{'tx_hash': hash_to_hex_str(utxo.tx_hash), - 'tx_pos': utxo.tx_pos, - 'height': utxo.height, 'value': utxo.value} - for utxo in utxos - if (utxo.tx_hash, utxo.tx_pos) not in spends] - - async def hashX_subscribe(self, hashX, alias): - self.hashX_subs[hashX] = alias - return await self.address_status(hashX) - - async def hashX_unsubscribe(self, hashX, alias): - del self.hashX_subs[hashX] - - def address_to_hashX(self, address): - try: - return self.coin.address_to_hashX(address) - except Exception: - pass - raise RPCError(BAD_REQUEST, f'{address} is not a valid address') - - async def address_get_balance(self, address): - """Return the confirmed and unconfirmed balance of an address.""" - hashX = self.address_to_hashX(address) - return await self.get_balance(hashX) - - async def address_get_history(self, address): - """Return the confirmed and unconfirmed history of an address.""" - hashX = self.address_to_hashX(address) - return await self.confirmed_and_unconfirmed_history(hashX) - - async def address_get_mempool(self, address): - """Return the mempool transactions touching an address.""" - hashX = self.address_to_hashX(address) - return await self.unconfirmed_history(hashX) - - async def address_listunspent(self, address): - """Return the list of UTXOs of an address.""" - hashX = self.address_to_hashX(address) - return await self.hashX_listunspent(hashX) - - async def address_subscribe(self, address): - """Subscribe to an address. - - address: the address to subscribe to""" - hashX = self.address_to_hashX(address) - return await self.hashX_subscribe(hashX, address) - - async def address_unsubscribe(self, address): - """Unsubscribe an address. 
- - address: the address to unsubscribe""" - hashX = self.address_to_hashX(address) - return await self.hashX_unsubscribe(hashX, address) - - async def get_balance(self, hashX): - utxos = await self.db.all_utxos(hashX) - confirmed = sum(utxo.value for utxo in utxos) - unconfirmed = await self.mempool.balance_delta(hashX) - return {'confirmed': confirmed, 'unconfirmed': unconfirmed} - - async def scripthash_get_balance(self, scripthash): - """Return the confirmed and unconfirmed balance of a scripthash.""" - hashX = scripthash_to_hashX(scripthash) - return await self.get_balance(hashX) - - async def unconfirmed_history(self, hashX): - # Note unconfirmed history is unordered in electrum-server - # height is -1 if it has unconfirmed inputs, otherwise 0 - return [{'tx_hash': hash_to_hex_str(tx.hash), - 'height': -tx.has_unconfirmed_inputs, - 'fee': tx.fee} - for tx in await self.mempool.transaction_summaries(hashX)] - - async def confirmed_and_unconfirmed_history(self, hashX): - # Note history is ordered but unconfirmed is unordered in e-s - history = await self.session_mgr.limited_history(hashX) - conf = [{'tx_hash': hash_to_hex_str(tx_hash), 'height': height} - for tx_hash, height in history] - return conf + await self.unconfirmed_history(hashX) - - async def scripthash_get_history(self, scripthash): - """Return the confirmed and unconfirmed history of a scripthash.""" - hashX = scripthash_to_hashX(scripthash) - return await self.confirmed_and_unconfirmed_history(hashX) - - async def scripthash_get_mempool(self, scripthash): - """Return the mempool transactions touching a scripthash.""" - hashX = scripthash_to_hashX(scripthash) - return await self.unconfirmed_history(hashX) - - async def scripthash_listunspent(self, scripthash): - """Return the list of UTXOs of a scripthash.""" - hashX = scripthash_to_hashX(scripthash) - return await self.hashX_listunspent(hashX) - - async def scripthash_subscribe(self, scripthash): - """Subscribe to a script hash. - - scripthash: the SHA256 hash of the script to subscribe to""" - hashX = scripthash_to_hashX(scripthash) - return await self.hashX_subscribe(hashX, scripthash) - - async def _merkle_proof(self, cp_height, height): - max_height = self.db.db_height - if not height <= cp_height <= max_height: - raise RPCError(BAD_REQUEST, - f'require header height {height:,d} <= ' - f'cp_height {cp_height:,d} <= ' - f'chain height {max_height:,d}') - branch, root = await self.db.header_branch_and_root(cp_height + 1, - height) - return { - 'branch': [hash_to_hex_str(elt) for elt in branch], - 'root': hash_to_hex_str(root), - } - - async def block_header(self, height, cp_height=0): - """Return a raw block header as a hexadecimal string, or as a - dictionary with a merkle proof.""" - height = non_negative_integer(height) - cp_height = non_negative_integer(cp_height) - raw_header_hex = (await self.session_mgr.raw_header(height)).hex() - if cp_height == 0: - return raw_header_hex - result = {'header': raw_header_hex} - result.update(await self._merkle_proof(cp_height, height)) - return result - - async def block_header_13(self, height): - """Return a raw block header as a hexadecimal string. - - height: the header's height""" - return await self.block_header(height) - - async def block_headers(self, start_height, count, cp_height=0, b64=False): - """Return count concatenated block headers as hex for the main chain; - starting at start_height. - - start_height and count must be non-negative integers. At most - MAX_CHUNK_SIZE headers will be returned. 
- """ - start_height = non_negative_integer(start_height) - count = non_negative_integer(count) - cp_height = non_negative_integer(cp_height) - - max_size = self.MAX_CHUNK_SIZE - count = min(count, max_size) - headers, count = await self.db.read_headers(start_height, count) - compressobj = zlib.compressobj(wbits=-15, level=1, memLevel=9) - headers = base64.b64encode(compressobj.compress(headers) + compressobj.flush()).decode() if b64 else headers.hex() - result = { - 'base64' if b64 else 'hex': headers, - 'count': count, - 'max': max_size - } - if count and cp_height: - last_height = start_height + count - 1 - result.update(await self._merkle_proof(cp_height, last_height)) - return result - - async def block_get_chunk(self, index): - """Return a chunk of block headers as a hexadecimal string. - - index: the chunk index""" - index = non_negative_integer(index) - size = self.coin.CHUNK_SIZE - start_height = index * size - headers, _ = await self.db.read_headers(start_height, size) - return headers.hex() - - async def block_get_header(self, height): - """The deserialized header at a given height. - - height: the header's height""" - height = non_negative_integer(height) - return await self.session_mgr.electrum_header(height) - - def is_tor(self): - """Try to detect if the connection is to a tor hidden service we are - running.""" - peername = self.peer_mgr.proxy_peername() - if not peername: - return False - peer_address = self.peer_address() - return peer_address and peer_address[0] == peername[0] - - async def replaced_banner(self, banner): - network_info = await self.daemon_request('getnetworkinfo') - ni_version = network_info['version'] - major, minor = divmod(ni_version, 1000000) - minor, revision = divmod(minor, 10000) - revision //= 100 - daemon_version = f'{major:d}.{minor:d}.{revision:d}' - for pair in [ - ('$SERVER_VERSION', self.version), - ('$DAEMON_VERSION', daemon_version), - ('$DAEMON_SUBVERSION', network_info['subversion']), - ('$DONATION_ADDRESS', self.env.donation_address), - ]: - banner = banner.replace(*pair) - return banner - - async def donation_address(self): - """Return the donation address as a string, empty if there is none.""" - return self.env.donation_address - - async def banner(self): - """Return the server banner text.""" - banner = f'You are connected to an {self.version} server.' - - if self.is_tor(): - banner_file = self.env.tor_banner_file - else: - banner_file = self.env.banner_file - if banner_file: - try: - with codecs.open(banner_file, 'r', 'utf-8') as f: - banner = f.read() - except Exception as e: - self.logger.error(f'reading banner file {banner_file}: {e!r}') - else: - banner = await self.replaced_banner(banner) - - return banner - - async def relayfee(self): - """The minimum fee a low-priority tx must pay in order to be accepted - to the daemon's memory pool.""" - return await self.daemon_request('relayfee') - - async def estimatefee(self, number): - """The estimated transaction fee per kilobyte to be paid for a - transaction to be included within a certain number of blocks. - - number: the number of blocks - """ - number = non_negative_integer(number) - return await self.daemon_request('estimatefee', number) - - async def ping(self): - """Serves as a connection keep-alive mechanism and for the client to - confirm the server is still responding. - """ - return None - - async def server_version(self, client_name='', protocol_version=None): - """Returns the server version as a string. 
- - client_name: a string identifying the client - protocol_version: the protocol version spoken by the client - """ - if self.sv_seen and self.protocol_tuple >= (1, 4): - raise RPCError(BAD_REQUEST, f'server.version already sent') - self.sv_seen = True - - if client_name: - client_name = str(client_name) - if self.env.drop_client is not None and \ - self.env.drop_client.match(client_name): - self.close_after_send = True - raise RPCError(BAD_REQUEST, - f'unsupported client: {client_name}') - self.client = client_name[:17] - - # Find the highest common protocol version. Disconnect if - # that protocol version is unsupported. - ptuple, client_min = util.protocol_version( - protocol_version, self.PROTOCOL_MIN, self.PROTOCOL_MAX) - if ptuple is None: - if client_min > self.PROTOCOL_MIN: - self.logger.info(f'client requested future protocol version ' - f'{util.version_string(client_min)} ' - f'- is your software out of date?') - self.close_after_send = True - raise RPCError(BAD_REQUEST, - f'unsupported protocol version: {protocol_version}') - self.set_request_handlers(ptuple) - - return self.version, self.protocol_version_string() - - async def transaction_broadcast(self, raw_tx): - """Broadcast a raw transaction to the network. - - raw_tx: the raw transaction as a hexadecimal string""" - # This returns errors as JSON RPC errors, as is natural - try: - hex_hash = await self.session_mgr.broadcast_transaction(raw_tx) - self.txs_sent += 1 - self.logger.info(f'sent tx: {hex_hash}') - return hex_hash - except DaemonError as e: - error, = e.args - message = error['message'] - self.logger.info(f'error sending transaction: {message}') - raise RPCError(BAD_REQUEST, 'the transaction was rejected by ' - f'network rules.\n\n{message}\n[{raw_tx}]') - - async def transaction_get(self, tx_hash, verbose=False): - """Return the serialized raw transaction given its hash. - - tx_hash: the transaction hash as a hexadecimal string - verbose: passed on to the daemon - """ - assert_tx_hash(tx_hash) - if verbose not in (True, False): - raise RPCError(BAD_REQUEST, f'"verbose" must be a boolean') - - return await self.daemon_request('getrawtransaction', tx_hash, verbose) - - async def _block_hash_and_tx_hashes(self, height): - """Returns a pair (block_hash, tx_hashes) for the main chain block at - the given height. - - block_hash is a hexadecimal string, and tx_hashes is an - ordered list of hexadecimal strings. - """ - height = non_negative_integer(height) - hex_hashes = await self.daemon_request('block_hex_hashes', height, 1) - block_hash = hex_hashes[0] - block = await self.daemon_request('deserialised_block', block_hash) - return block_hash, block['tx'] - - def _get_merkle_branch(self, tx_hashes, tx_pos): - """Return a merkle branch to a transaction. - - tx_hashes: ordered list of hex strings of tx hashes in a block - tx_pos: index of transaction in tx_hashes to create branch for - """ - hashes = [hex_str_to_hash(hash) for hash in tx_hashes] - branch, root = self.db.merkle.branch_and_root(hashes, tx_pos) - branch = [hash_to_hex_str(hash) for hash in branch] - return branch - - async def transaction_merkle(self, tx_hash, height): - """Return the merkle branch to a confirmed transaction given its hash - and height.
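`_get_merkle_branch` above delegates to the database's Merkle implementation. Conceptually the branch is the list of sibling hashes on the path from one transaction up to the double-SHA256 merkle root; a minimal sketch of that computation (equivalent in spirit, not the actual `db.merkle` code):

```python
from hashlib import sha256

def double_sha256(x: bytes) -> bytes:
    return sha256(sha256(x).digest()).digest()

def branch_and_root(hashes, index):
    """Bitcoin-style merkle branch: odd levels duplicate their last hash."""
    hashes = list(hashes)
    branch = []
    while len(hashes) > 1:
        if len(hashes) % 2:
            hashes.append(hashes[-1])
        branch.append(hashes[index ^ 1])  # sibling on this level
        index //= 2
        hashes = [double_sha256(left + right)
                  for left, right in zip(hashes[::2], hashes[1::2])]
    return branch, hashes[0]
```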
- - tx_hash: the transaction hash as a hexadecimal string - height: the height of the block it is in - """ - assert_tx_hash(tx_hash) - block_hash, tx_hashes = await self._block_hash_and_tx_hashes(height) - try: - pos = tx_hashes.index(tx_hash) - except ValueError: - raise RPCError(BAD_REQUEST, f'tx hash {tx_hash} not in ' - f'block {block_hash} at height {height:,d}') - branch = self._get_merkle_branch(tx_hashes, pos) - return {"block_height": height, "merkle": branch, "pos": pos} - - async def transaction_id_from_pos(self, height, tx_pos, merkle=False): - """Return the txid and optionally a merkle proof, given - a block height and position in the block. - """ - tx_pos = non_negative_integer(tx_pos) - if merkle not in (True, False): - raise RPCError(BAD_REQUEST, f'"merkle" must be a boolean') - - block_hash, tx_hashes = await self._block_hash_and_tx_hashes(height) - try: - tx_hash = tx_hashes[tx_pos] - except IndexError: - raise RPCError(BAD_REQUEST, f'no tx at position {tx_pos:,d} in ' - f'block {block_hash} at height {height:,d}') - - if merkle: - branch = self._get_merkle_branch(tx_hashes, tx_pos) - return {"tx_hash": tx_hash, "merkle": branch} - else: - return tx_hash - - def set_request_handlers(self, ptuple): - self.protocol_tuple = ptuple - - handlers = { - 'blockchain.block.get_chunk': self.block_get_chunk, - 'blockchain.block.get_header': self.block_get_header, - 'blockchain.estimatefee': self.estimatefee, - 'blockchain.relayfee': self.relayfee, - 'blockchain.scripthash.get_balance': self.scripthash_get_balance, - 'blockchain.scripthash.get_history': self.scripthash_get_history, - 'blockchain.scripthash.get_mempool': self.scripthash_get_mempool, - 'blockchain.scripthash.listunspent': self.scripthash_listunspent, - 'blockchain.scripthash.subscribe': self.scripthash_subscribe, - 'blockchain.transaction.broadcast': self.transaction_broadcast, - 'blockchain.transaction.get': self.transaction_get, - 'blockchain.transaction.get_merkle': self.transaction_merkle, - 'server.add_peer': self.add_peer, - 'server.banner': self.banner, - 'server.donation_address': self.donation_address, - 'server.features': self.server_features_async, - 'server.peers.subscribe': self.peers_subscribe, - 'server.version': self.server_version, - } - - if ptuple >= (1, 2): - # New handler as of 1.2 - handlers.update({ - 'mempool.get_fee_histogram': - self.mempool.compact_fee_histogram, - 'blockchain.block.headers': self.block_headers, - 'server.ping': self.ping, - }) - - if ptuple >= (1, 4): - handlers.update({ - 'blockchain.block.header': self.block_header, - 'blockchain.block.headers': self.block_headers, - 'blockchain.headers.subscribe': self.headers_subscribe, - 'blockchain.transaction.id_from_pos': - self.transaction_id_from_pos, - }) - elif ptuple >= (1, 3): - handlers.update({ - 'blockchain.block.header': self.block_header_13, - 'blockchain.headers.subscribe': self.headers_subscribe_True, - }) - else: - handlers.update({ - 'blockchain.headers.subscribe': self.headers_subscribe_False, - 'blockchain.address.get_balance': self.address_get_balance, - 'blockchain.address.get_history': self.address_get_history, - 'blockchain.address.get_mempool': self.address_get_mempool, - 'blockchain.address.listunspent': self.address_listunspent, - 'blockchain.address.subscribe': self.address_subscribe, - 'blockchain.address.unsubscribe': self.address_unsubscribe, - }) - - self.request_handlers = handlers - - -class LocalRPC(SessionBase): - """A local TCP RPC server session.""" - - def __init__(self, *args, **kwargs): - 
super().__init__(*args, **kwargs) - self.client = 'RPC' - self.connection._max_response_size = 0 - - def protocol_version_string(self): - return 'RPC' - - -class DashElectrumX(ElectrumX): - """A TCP server that handles incoming Electrum Dash connections.""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.mns = set() - - def set_request_handlers(self, ptuple): - super().set_request_handlers(ptuple) - self.request_handlers.update({ - 'masternode.announce.broadcast': - self.masternode_announce_broadcast, - 'masternode.subscribe': self.masternode_subscribe, - 'masternode.list': self.masternode_list - }) - - async def notify(self, touched, height_changed): - """Notify the client about changes in the masternode list.""" - await super().notify(touched, height_changed) - for mn in self.mns: - status = await self.daemon_request('masternode_list', - ['status', mn]) - await self.send_notification('masternode.subscribe', - [mn, status.get(mn)]) - - # Masternode command handlers - async def masternode_announce_broadcast(self, signmnb): - """Pass through the masternode announce message to be broadcast - by the daemon. - - signmnb: signed masternode broadcast message.""" - try: - return await self.daemon_request('masternode_broadcast', - ['relay', signmnb]) - except DaemonError as e: - error, = e.args - message = error['message'] - self.logger.info(f'masternode_broadcast: {message}') - raise RPCError(BAD_REQUEST, 'the masternode broadcast was ' - f'rejected.\n\n{message}\n[{signmnb}]') - - async def masternode_subscribe(self, collateral): - """Returns the status of a masternode. - - collateral: masternode collateral. - """ - result = await self.daemon_request('masternode_list', - ['status', collateral]) - if result is not None: - self.mns.add(collateral) - return result.get(collateral) - return None - - async def masternode_list(self, payees): - """ - Returns the list of masternodes. - - payees: a list of masternode payee addresses. - """ - if not isinstance(payees, list): - raise RPCError(BAD_REQUEST, 'expected a list of payees') - - def get_masternode_payment_queue(mns): - """Returns the calculated position in the payment queue for all the - valid masternodes in the given mns list. - - mns: a list of masternode information. - """ - now = int(datetime.datetime.utcnow().strftime("%s")) - mn_queue = [] - - # Only ENABLED masternodes are considered for the list. - for line in mns: - mnstat = mns[line].split() - if mnstat[0] == 'ENABLED': - # if last paid time == 0 - if int(mnstat[5]) == 0: - # use active seconds - mnstat.append(int(mnstat[4])) - else: - # now minus last paid - delta = now - int(mnstat[5]) - # if > active seconds, use active seconds - if delta >= int(mnstat[4]): - mnstat.append(int(mnstat[4])) - # use active seconds - else: - mnstat.append(delta) - mn_queue.append(mnstat) - mn_queue = sorted(mn_queue, key=lambda x: x[8], reverse=True) - return mn_queue - - def get_payment_position(payment_queue, address): - """ - Returns the position in the payment queue for the given address. - - payment_queue: the masternode payment queue. - address: masternode payee address. - """ - position = -1 - for pos, mn in enumerate(payment_queue, start=1): - if mn[2] == address: - position = pos - break - return position - - # In accordance with the masternode payment queue, a custom list - # with the masternode information, including the payment - # position, is returned.
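`get_masternode_payment_queue` above scores every ENABLED masternode by an effective wait time (the full active time if it has never been paid, otherwise the time since the last payment capped at the active time) and sorts longest-waiting first. A condensed restatement of that rule, assuming the daemon returns the same space-separated status strings:

```python
import time

def payment_queue(masternodes):
    # masternodes: vin -> 'status protocol payee lastseen activeseconds
    # lastpaidtime ...' strings, as returned by the masternodelist RPC.
    now = int(time.time())
    queue = []
    for info in masternodes.values():
        fields = info.split()
        if fields[0] != 'ENABLED':
            continue
        active, last_paid = int(fields[4]), int(fields[5])
        # Never-paid nodes count their full active time; otherwise the
        # wait is time since last payment, capped at the active time.
        wait = active if last_paid == 0 else min(now - last_paid, active)
        queue.append((wait, fields))
    queue.sort(key=lambda item: item[0], reverse=True)
    return [fields for _, fields in queue]
```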
- cache = self.session_mgr.mn_cache - if not cache or self.session_mgr.mn_cache_height != self.db.db_height: - full_mn_list = await self.daemon_request('masternode_list', - ['full']) - mn_payment_queue = get_masternode_payment_queue(full_mn_list) - mn_payment_count = len(mn_payment_queue) - mn_list = [] - for key, value in full_mn_list.items(): - mn_data = value.split() - mn_info = {} - mn_info['vin'] = key - mn_info['status'] = mn_data[0] - mn_info['protocol'] = mn_data[1] - mn_info['payee'] = mn_data[2] - mn_info['lastseen'] = mn_data[3] - mn_info['activeseconds'] = mn_data[4] - mn_info['lastpaidtime'] = mn_data[5] - mn_info['lastpaidblock'] = mn_data[6] - mn_info['ip'] = mn_data[7] - mn_info['paymentposition'] = get_payment_position( - mn_payment_queue, mn_info['payee']) - mn_info['inselection'] = ( - mn_info['paymentposition'] < mn_payment_count // 10) - balance = await self.address_get_balance(mn_info['payee']) - mn_info['balance'] = (sum(balance.values()) - / self.coin.VALUE_PER_COIN) - mn_list.append(mn_info) - cache.clear() - cache.extend(mn_list) - self.session_mgr.mn_cache_height = self.db.db_height - - # If payees is an empty list the whole masternode list is returned - if payees: - return [mn for mn in cache if mn['payee'] in payees] - else: - return cache diff --git a/torba/torba/ui/__init__.py b/torba/torba/ui/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/torba/torba/workbench/Makefile b/torba/torba/workbench/Makefile deleted file mode 100644 index 524c22557..000000000 --- a/torba/torba/workbench/Makefile +++ /dev/null @@ -1,5 +0,0 @@ -all: _blockchain_dock.py _output_dock.py -_blockchain_dock.py: blockchain_dock.ui - pyside2-uic -d blockchain_dock.ui -o _blockchain_dock.py -_output_dock.py: output_dock.ui - pyside2-uic -d output_dock.ui -o _output_dock.py diff --git a/torba/torba/workbench/__init__.py b/torba/torba/workbench/__init__.py deleted file mode 100644 index 3449276fd..000000000 --- a/torba/torba/workbench/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .application import main diff --git a/torba/torba/workbench/_blockchain_dock.py b/torba/torba/workbench/_blockchain_dock.py deleted file mode 100644 index 2a7cc11d8..000000000 --- a/torba/torba/workbench/_blockchain_dock.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- coding: utf-8 -*- - -# Form implementation generated from reading ui file 'blockchain_dock.ui', -# licensing of 'blockchain_dock.ui' applies. -# -# Created: Sun Jan 13 02:56:21 2019 -# by: pyside2-uic running on PySide2 5.12.0 -# -# WARNING! All changes made in this file will be lost! 
- -from PySide2 import QtCore, QtGui, QtWidgets - -class Ui_BlockchainDock(object): - def setupUi(self, BlockchainDock): - BlockchainDock.setObjectName("BlockchainDock") - BlockchainDock.resize(416, 167) - BlockchainDock.setFloating(False) - BlockchainDock.setFeatures(QtWidgets.QDockWidget.AllDockWidgetFeatures) - self.dockWidgetContents = QtWidgets.QWidget() - self.dockWidgetContents.setObjectName("dockWidgetContents") - self.formLayout = QtWidgets.QFormLayout(self.dockWidgetContents) - self.formLayout.setObjectName("formLayout") - self.generate = QtWidgets.QPushButton(self.dockWidgetContents) - self.generate.setObjectName("generate") - self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.generate) - self.blocks = QtWidgets.QSpinBox(self.dockWidgetContents) - self.blocks.setMinimum(1) - self.blocks.setMaximum(9999) - self.blocks.setProperty("value", 1) - self.blocks.setObjectName("blocks") - self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.blocks) - self.transfer = QtWidgets.QPushButton(self.dockWidgetContents) - self.transfer.setObjectName("transfer") - self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.transfer) - self.horizontalLayout = QtWidgets.QHBoxLayout() - self.horizontalLayout.setObjectName("horizontalLayout") - self.amount = QtWidgets.QDoubleSpinBox(self.dockWidgetContents) - self.amount.setSuffix("") - self.amount.setMaximum(9999.99) - self.amount.setProperty("value", 10.0) - self.amount.setObjectName("amount") - self.horizontalLayout.addWidget(self.amount) - self.to_label = QtWidgets.QLabel(self.dockWidgetContents) - self.to_label.setObjectName("to_label") - self.horizontalLayout.addWidget(self.to_label) - self.address = QtWidgets.QLineEdit(self.dockWidgetContents) - self.address.setObjectName("address") - self.horizontalLayout.addWidget(self.address) - self.formLayout.setLayout(1, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout) - self.invalidate = QtWidgets.QPushButton(self.dockWidgetContents) - self.invalidate.setObjectName("invalidate") - self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.invalidate) - self.block_hash = QtWidgets.QLineEdit(self.dockWidgetContents) - self.block_hash.setObjectName("block_hash") - self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.block_hash) - BlockchainDock.setWidget(self.dockWidgetContents) - - self.retranslateUi(BlockchainDock) - QtCore.QMetaObject.connectSlotsByName(BlockchainDock) - - def retranslateUi(self, BlockchainDock): - BlockchainDock.setWindowTitle(QtWidgets.QApplication.translate("BlockchainDock", "Blockchain", None, -1)) - self.generate.setText(QtWidgets.QApplication.translate("BlockchainDock", "generate", None, -1)) - self.blocks.setSuffix(QtWidgets.QApplication.translate("BlockchainDock", " block(s)", None, -1)) - self.transfer.setText(QtWidgets.QApplication.translate("BlockchainDock", "transfer", None, -1)) - self.to_label.setText(QtWidgets.QApplication.translate("BlockchainDock", "to", None, -1)) - self.address.setPlaceholderText(QtWidgets.QApplication.translate("BlockchainDock", "recipient address", None, -1)) - self.invalidate.setText(QtWidgets.QApplication.translate("BlockchainDock", "invalidate", None, -1)) - self.block_hash.setPlaceholderText(QtWidgets.QApplication.translate("BlockchainDock", "block hash", None, -1)) - diff --git a/torba/torba/workbench/_output_dock.py b/torba/torba/workbench/_output_dock.py deleted file mode 100644 index 980343735..000000000 --- a/torba/torba/workbench/_output_dock.py +++ /dev/null @@ -1,34 +0,0 
diff --git a/torba/torba/workbench/_output_dock.py b/torba/torba/workbench/_output_dock.py
deleted file mode 100644
index 980343735..000000000
--- a/torba/torba/workbench/_output_dock.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Form implementation generated from reading ui file 'output_dock.ui',
-# licensing of 'output_dock.ui' applies.
-#
-# Created: Sat Oct 27 16:41:03 2018
-#      by: pyside2-uic running on PySide2 5.11.2
-#
-# WARNING! All changes made in this file will be lost!
-
-from PySide2 import QtCore, QtGui, QtWidgets
-
-class Ui_OutputDock(object):
-    def setupUi(self, OutputDock):
-        OutputDock.setObjectName("OutputDock")
-        OutputDock.resize(700, 397)
-        OutputDock.setFloating(False)
-        OutputDock.setFeatures(QtWidgets.QDockWidget.AllDockWidgetFeatures)
-        self.dockWidgetContents = QtWidgets.QWidget()
-        self.dockWidgetContents.setObjectName("dockWidgetContents")
-        self.horizontalLayout = QtWidgets.QHBoxLayout(self.dockWidgetContents)
-        self.horizontalLayout.setObjectName("horizontalLayout")
-        self.textEdit = QtWidgets.QTextEdit(self.dockWidgetContents)
-        self.textEdit.setReadOnly(True)
-        self.textEdit.setObjectName("textEdit")
-        self.horizontalLayout.addWidget(self.textEdit)
-        OutputDock.setWidget(self.dockWidgetContents)
-
-        self.retranslateUi(OutputDock)
-        QtCore.QMetaObject.connectSlotsByName(OutputDock)
-
-    def retranslateUi(self, OutputDock):
-        OutputDock.setWindowTitle(QtWidgets.QApplication.translate("OutputDock", "Output", None, -1))
-
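`Ui_OutputDock` is the log-viewer counterpart: a dock wrapping a single read-only QTextEdit that `application.py` (next) treats as an append-only log sink. An illustrative sketch of that use, assuming a running QApplication; the sample log line is made up:

    import sys
    from PySide2 import QtWidgets

    app = QtWidgets.QApplication(sys.argv)
    text_edit = QtWidgets.QTextEdit()
    text_edit.setReadOnly(True)  # same configuration as Ui_OutputDock.textEdit
    text_edit.append('[torba.blockchain] block found')  # how LoggingOutput receives lines
    text_edit.show()
    sys.exit(app.exec_())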
diff --git a/torba/torba/workbench/application.py b/torba/torba/workbench/application.py
deleted file mode 100644
index 56b282844..000000000
--- a/torba/torba/workbench/application.py
+++ /dev/null
@@ -1,401 +0,0 @@
-import sys
-import json
-import math
-
-from PySide2 import QtCore, QtGui, QtWidgets, QtNetwork, QtWebSockets, QtSvg
-
-from torba.workbench._output_dock import Ui_OutputDock as OutputDock
-from torba.workbench._blockchain_dock import Ui_BlockchainDock as BlockchainDock
-
-
-def dict_to_post_data(d):
-    query = QtCore.QUrlQuery()
-    for key, value in d.items():
-        query.addQueryItem(str(key), str(value))
-    return QtCore.QByteArray(query.toString().encode())
-
-
-class LoggingOutput(QtWidgets.QDockWidget, OutputDock):
-
-    def __init__(self, title, parent):
-        super().__init__(parent)
-        self.setupUi(self)
-        self.setWindowTitle(title)
-
-
-class BlockchainControls(QtWidgets.QDockWidget, BlockchainDock):
-
-    def __init__(self, parent):
-        super().__init__(parent)
-        self.setupUi(self)
-        self.generate.clicked.connect(self.on_generate)
-        self.transfer.clicked.connect(self.on_transfer)
-
-    def on_generate(self):
-        print('generating')
-        self.parent().run_command('generate', blocks=self.blocks.value())
-
-    def on_transfer(self):
-        print('transfering')
-        self.parent().run_command('transfer', amount=self.amount.value())
-
-
-class Arrow(QtWidgets.QGraphicsLineItem):
-
-    def __init__(self, start_node, end_node, parent=None, scene=None):
-        super().__init__(parent, scene)
-        self.start_node = start_node
-        self.start_node.connect_arrow(self)
-        self.end_node = end_node
-        self.end_node.connect_arrow(self)
-        self.arrow_head = QtGui.QPolygonF()
-        self.setFlag(QtWidgets.QGraphicsItem.ItemIsSelectable, True)
-        self.setZValue(-1000.0)
-        self.arrow_color = QtCore.Qt.black
-        self.setPen(QtGui.QPen(
-            self.arrow_color, 2, QtCore.Qt.SolidLine, QtCore.Qt.RoundCap, QtCore.Qt.RoundJoin
-        ))
-
-    def boundingRect(self):
-        extra = (self.pen().width() + 20) / 2.0
-        p1 = self.line().p1()
-        p2 = self.line().p2()
-        size = QtCore.QSizeF(p2.x() - p1.x(), p2.y() - p1.y())
-        return QtCore.QRectF(p1, size).normalized().adjusted(-extra, -extra, extra, extra)
-
-    def shape(self):
-        path = super().shape()
-        path.addPolygon(self.arrow_head)
-        return path
-
-    def update_position(self):
-        line = QtCore.QLineF(
-            self.mapFromItem(self.start_node, 0, 0),
-            self.mapFromItem(self.end_node, 0, 0)
-        )
-        self.setLine(line)
-
-    def paint(self, painter, option, widget=None):
-        if self.start_node.collidesWithItem(self.end_node):
-            return
-
-        start_node = self.start_node
-        end_node = self.end_node
-        color = self.arrow_color
-        pen = self.pen()
-        pen.setColor(self.arrow_color)
-        arrow_size = 20.0
-        painter.setPen(pen)
-        painter.setBrush(self.arrow_color)
-
-        end_rectangle = end_node.sceneBoundingRect()
-        start_center = start_node.sceneBoundingRect().center()
-        end_center = end_rectangle.center()
-        center_line = QtCore.QLineF(start_center, end_center)
-        end_polygon = QtGui.QPolygonF(end_rectangle)
-        p1 = end_polygon.at(0)
-
-        intersect_point = QtCore.QPointF()
-        for p2 in end_polygon:
-            poly_line = QtCore.QLineF(p1, p2)
-            intersect_type, intersect_point = poly_line.intersect(center_line)
-            if intersect_type == QtCore.QLineF.BoundedIntersection:
-                break
-            p1 = p2
-
-        self.setLine(QtCore.QLineF(intersect_point, start_center))
-        line = self.line()
-
-        angle = math.acos(line.dx() / line.length())
-        if line.dy() >= 0:
-            angle = (math.pi * 2.0) - angle
-
-        arrow_p1 = line.p1() + QtCore.QPointF(
-            math.sin(angle + math.pi / 3.0) * arrow_size,
-            math.cos(angle + math.pi / 3.0) * arrow_size
-        )
-        arrow_p2 = line.p1() + QtCore.QPointF(
-            math.sin(angle + math.pi - math.pi / 3.0) * arrow_size,
-            math.cos(angle + math.pi - math.pi / 3.0) * arrow_size
-        )
-
-        self.arrow_head.clear()
-        for point in [line.p1(), arrow_p1, arrow_p2]:
-            self.arrow_head.append(point)
-
-        painter.drawLine(line)
-        painter.drawPolygon(self.arrow_head)
-        if self.isSelected():
-            painter.setPen(QtGui.QPen(color, 1, QtCore.Qt.DashLine))
-            line = QtCore.QLineF(line)
-            line.translate(0, 4.0)
-            painter.drawLine(line)
-            line.translate(0, -8.0)
-            painter.drawLine(line)
-
-
-ONLINE_COLOR = "limegreen"
-OFFLINE_COLOR = "lightsteelblue"
-
-
-class NodeItem(QtSvg.QGraphicsSvgItem):
-
-    def __init__(self, context_menu):
-        super().__init__()
-        self._port = ''
-        self._color = OFFLINE_COLOR
-        self.context_menu = context_menu
-        self.arrows = set()
-        self.renderer = QtSvg.QSvgRenderer()
-        self.update_svg()
-        self.setSharedRenderer(self.renderer)
-        #self.setScale(2.0)
-        #self.setTransformOriginPoint(24, 24)
-        self.setFlag(QtWidgets.QGraphicsItem.ItemIsMovable, True)
-        self.setFlag(QtWidgets.QGraphicsItem.ItemIsSelectable, True)
-
-    def get_svg(self):
-        return self.SVG.format(
-            port=self.port,
-            color=self._color
-        )
-
-    def update_svg(self):
-        self.renderer.load(QtCore.QByteArray(self.get_svg().encode()))
-        self.update()
-
-    @property
-    def port(self):
-        return self._port
-
-    @port.setter
-    def port(self, port):
-        self._port = port
-        self.update_svg()
-
-    @property
-    def online(self):
-        return self._color == ONLINE_COLOR
-
-    @online.setter
-    def online(self, online):
-        if online:
-            self._color = ONLINE_COLOR
-        else:
-            self._color = OFFLINE_COLOR
-        self.update_svg()
-
-    def connect_arrow(self, arrow):
-        self.arrows.add(arrow)
-
-    def disconnect_arrow(self, arrow):
-        self.arrows.discard(arrow)
-
-    def contextMenuEvent(self, event):
-        self.scene().clearSelection()
-        self.setSelected(True)
-        self.myContextMenu.exec_(event.screenPos())
-
-    def itemChange(self, change, value):
-        if change == QtWidgets.QGraphicsItem.ItemPositionChange:
-            for arrow in self.arrows:
-                arrow.update_position()
-        return value
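NodeItem's rendering trick is worth noting: each property setter re-formats an SVG template string and reloads the shared QSvgRenderer, so the scene icon redraws itself whenever a value changes. A stripped-down, Qt-free sketch of that template-and-reload idea (the template markup here is a stand-in; the original SVG bodies did not survive extraction):

    # Minimal sketch of NodeItem's SVG templating, without Qt for brevity.
    # The template string is illustrative, not the original artwork.
    SVG = '<svg><rect fill="{color}"/><text>{port}</text></svg>'

    class Node:
        def __init__(self):
            self._port, self._color = '', 'lightsteelblue'  # OFFLINE_COLOR

        def get_svg(self):
            return SVG.format(port=self._port, color=self._color)

        @property
        def online(self):
            return self._color == 'limegreen'

        @online.setter
        def online(self, online):
            self._color = 'limegreen' if online else 'lightsteelblue'
            # real code: self.renderer.load(QByteArray(self.get_svg().encode()))

    node = Node()
    node.online = True
    print(node.get_svg())  # re-rendered with the ONLINE_COLOR fill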
-class BlockchainNode(NodeItem):
-    SVG = """
-
-
-
-        {port}
-        {block}
-
-    """
-
-    def __init__(self, *args):
-        self._block_height = ''
-        super().__init__(*args)
-
-    @property
-    def block_height(self):
-        return self._block_height
-
-    @block_height.setter
-    def block_height(self, block_height):
-        self._block_height = block_height
-        self.update_svg()
-
-    def get_svg(self):
-        return self.SVG.format(
-            port=self.port,
-            block=self.block_height,
-            color=self._color
-        )
-
-
-class SPVNode(NodeItem):
-    SVG = """
-
-
-
-
-
-        {port}
-    """
-
-    def __init__(self, *args):
-        super().__init__(*args)
-
-
-class WalletNode(NodeItem):
-    SVG = """
-
-
-
-
-
-
-        {coins}
-    """
-
-    def __init__(self, *args):
-        self._coins = '--'
-        super().__init__(*args)
-
-    @property
-    def coins(self):
-        return self._coins
-
-    @coins.setter
-    def coins(self, coins):
-        self._coins = coins
-        self.update_svg()
-
-    def get_svg(self):
-        return self.SVG.format(
-            coins=self.coins,
-            color=self._color
-        )
-
-
-class Stage(QtWidgets.QGraphicsScene):
-
-    def __init__(self, parent):
-        super().__init__(parent)
-        self.blockchain = b = BlockchainNode(None)
-        b.port = ''
-        b.block_height = ''
-        b.setZValue(0)
-        b.setPos(-25, -100)
-        self.addItem(b)
-        self.spv = s = SPVNode(None)
-        s.port = ''
-        s.setZValue(0)
-        self.addItem(s)
-        s.setPos(-10, -10)
-        self.wallet = w = WalletNode(None)
-        w.coins = ''
-        w.setZValue(0)
-        w.update_svg()
-        self.addItem(w)
-        w.setPos(0, 100)
-
-        self.addItem(Arrow(b, s))
-        self.addItem(Arrow(s, w))
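The Orchstr8Workbench class below drives this scene from JSON messages received over a websocket. Reconstructed from its on_message handler, the three message shapes it accepts look roughly like this (field values are illustrative, not captured traffic):

    import json
    # The three message shapes on_message() handles; values are made up.
    status = {'type': 'status', 'height': 105, 'balance': '10.0', 'miner': '245.0'}
    service = {'type': 'service', 'name': 'spv', 'port': 50001}
    log = {'type': 'log', 'name': 'torba.server.Controller', 'message': 'started'}
    for msg in (status, service, log):
        print(json.dumps(msg))  # what arrives as `text` in on_message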
-class Orchstr8Workbench(QtWidgets.QMainWindow):
-
-    def __init__(self):
-        super().__init__()
-        self.stage = Stage(self)
-        self.view = QtWidgets.QGraphicsView(self.stage)
-        self.status_bar = QtWidgets.QStatusBar(self)
-
-        self.setWindowTitle('Orchstr8 Workbench')
-        self.setCentralWidget(self.view)
-        self.setStatusBar(self.status_bar)
-
-        self.block_height = self.make_status_label('Height: -- ')
-        self.user_balance = self.make_status_label('User Balance: -- ')
-        self.mining_balance = self.make_status_label('Mining Balance: -- ')
-
-        self.wallet_log = LoggingOutput('Wallet', self)
-        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.wallet_log)
-        self.spv_log = LoggingOutput('SPV Server', self)
-        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.spv_log)
-        self.blockchain_log = LoggingOutput('Blockchain', self)
-        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.blockchain_log)
-
-        self.blockchain_controls = BlockchainControls(self)
-        self.addDockWidget(QtCore.Qt.RightDockWidgetArea, self.blockchain_controls)
-
-        self.network = QtNetwork.QNetworkAccessManager(self)
-        self.socket = QtWebSockets.QWebSocket()
-        self.socket.connected.connect(lambda: self.run_command('start'))
-        self.socket.error.connect(lambda e: print(f'errored: {e}'))
-        self.socket.textMessageReceived.connect(self.on_message)
-        self.socket.open('ws://localhost:7954/log')
-
-    def make_status_label(self, text):
-        label = QtWidgets.QLabel(text)
-        label.setFrameStyle(QtWidgets.QLabel.Panel | QtWidgets.QLabel.Sunken)
-        self.status_bar.addPermanentWidget(label)
-        return label
-
-    def on_message(self, text):
-        msg = json.loads(text)
-        if msg['type'] == 'status':
-            self.stage.wallet.coins = msg['balance']
-            self.stage.blockchain.block_height = msg['height']
-            self.block_height.setText(f"Height: {msg['height']} ")
-            self.user_balance.setText(f"User Balance: {msg['balance']} ")
-            self.mining_balance.setText(f"Mining Balance: {msg['miner']} ")
-        elif msg['type'] == 'service':
-            node = {
-                'blockchain': self.stage.blockchain,
-                'spv': self.stage.spv,
-                'wallet': self.stage.wallet
-            }[msg['name']]
-            node.online = True
-            node.port = f":{msg['port']}"
-        elif msg['type'] == 'log':
-            log = {
-                'blockchain': self.blockchain_log,
-                'electrumx': self.spv_log,
-                'lbryumx': self.spv_log,
-                'Controller': self.spv_log,
-                'LBRYBlockProcessor': self.spv_log,
-                'LBCDaemon': self.spv_log,
-            }.get(msg['name'].split('.')[-1], self.wallet_log)
-            log.textEdit.append(msg['message'])
-
-    def run_command(self, command, **kwargs):
-        request = QtNetwork.QNetworkRequest(QtCore.QUrl('http://localhost:7954/'+command))
-        request.setHeader(QtNetwork.QNetworkRequest.ContentTypeHeader, "application/x-www-form-urlencoded")
-        reply = self.network.post(request, dict_to_post_data(kwargs))
-        # reply.finished.connect(cb)
-        reply.error.connect(self.on_command_error)
-
-    @staticmethod
-    def on_command_error(error):
-        print('failed executing command:')
-        print(error)
-
-
-def main():
-    app = QtWidgets.QApplication(sys.argv)
-    workbench = Orchstr8Workbench()
-    workbench.setGeometry(100, 100, 1200, 600)
-    workbench.show()
-    return app.exec_()
-
-
-if __name__ == "__main__":
-    sys.exit(main())
diff --git a/torba/torba/workbench/blockchain_dock.ui b/torba/torba/workbench/blockchain_dock.ui
deleted file mode 100644
index 2946b839d..000000000
--- a/torba/torba/workbench/blockchain_dock.ui
+++ /dev/null
@@ -1,104 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<ui version="4.0">
- <class>BlockchainDock</class>
- <widget class="QDockWidget" name="BlockchainDock">
-  <property name="geometry">
-   <rect>
-    <x>0</x>
-    <y>0</y>
-    <width>416</width>
-    <height>167</height>
-   </rect>
-  </property>
-  <property name="floating">
-   <bool>false</bool>
-  </property>
-  <property name="features">
-   <set>QDockWidget::AllDockWidgetFeatures</set>
-  </property>
-  <property name="windowTitle">
-   <string>Blockchain</string>
-  </property>
-  <widget class="QWidget" name="dockWidgetContents">
-   <layout class="QFormLayout" name="formLayout">
-    <item row="0" column="0">
-     <widget class="QPushButton" name="generate">
-      <property name="text">
-       <string>generate</string>
-      </property>
-     </widget>
-    </item>
-    <item row="0" column="1">
-     <widget class="QSpinBox" name="blocks">
-      <property name="suffix">
-       <string> block(s)</string>
-      </property>
-      <property name="minimum">
-       <number>1</number>
-      </property>
-      <property name="maximum">
-       <number>9999</number>
-      </property>
-      <property name="value">
-       <number>1</number>
-      </property>
-     </widget>
-    </item>
-    <item row="1" column="0">
-     <widget class="QPushButton" name="transfer">
-      <property name="text">
-       <string>transfer</string>
-      </property>
-     </widget>
-    </item>
-    <item row="1" column="1">
-     <layout class="QHBoxLayout" name="horizontalLayout">
-      <item>
-       <widget class="QDoubleSpinBox" name="amount">
-        <property name="suffix">
-         <string/>
-        </property>
-        <property name="maximum">
-         <double>9999.989999999999782</double>
-        </property>
-        <property name="value">
-         <double>10.000000000000000</double>
-        </property>
-       </widget>
-      </item>
-      <item>
-       <widget class="QLabel" name="to_label">
-        <property name="text">
-         <string>to</string>
-        </property>
-       </widget>
-      </item>
-      <item>
-       <widget class="QLineEdit" name="address">
-        <property name="placeholderText">
-         <string>recipient address</string>
-        </property>
-       </widget>
-      </item>
-     </layout>
-    </item>
-    <item row="2" column="0">
-     <widget class="QPushButton" name="invalidate">
-      <property name="text">
-       <string>invalidate</string>
-      </property>
-     </widget>
-    </item>
-    <item row="2" column="1">
-     <widget class="QLineEdit" name="block_hash">
-      <property name="placeholderText">
-       <string>block hash</string>
-      </property>
-     </widget>
-    </item>
-   </layout>
-  </widget>
- </widget>
- <resources/>
- <connections/>
-</ui>
diff --git a/torba/torba/workbench/output_dock.ui b/torba/torba/workbench/output_dock.ui
deleted file mode 100644
index 3e1136659..000000000
--- a/torba/torba/workbench/output_dock.ui
+++ /dev/null
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<ui version="4.0">
- <class>OutputDock</class>
- <widget class="QDockWidget" name="OutputDock">
-  <property name="geometry">
-   <rect>
-    <x>0</x>
-    <y>0</y>
-    <width>700</width>
-    <height>397</height>
-   </rect>
-  </property>
-  <property name="floating">
-   <bool>false</bool>
-  </property>
-  <property name="features">
-   <set>QDockWidget::AllDockWidgetFeatures</set>
-  </property>
-  <property name="windowTitle">
-   <string>Output</string>
-  </property>
-  <widget class="QWidget" name="dockWidgetContents">
-   <layout class="QHBoxLayout" name="horizontalLayout">
-    <item>
-     <widget class="QTextEdit" name="textEdit">
-      <property name="readOnly">
-       <bool>true</bool>
-      </property>
-     </widget>
-    </item>
-   </layout>
-  </widget>
- </widget>
- <resources/>
- <connections/>
-</ui>
diff --git a/torba/tox.ini b/torba/tox.ini
deleted file mode 100644
index 8ec8dfcdf..000000000
--- a/torba/tox.ini
+++ /dev/null
@@ -1,18 +0,0 @@
-[tox]
-#envlist = unit,integration-{torba.coin.bitcoincash,torba.coin.bitcoinsegwit}
-envlist = py37-unit,py37-integration-torba.coin.bitcoinsegwit
-
-[travis:env]
-TESTTYPE =
-    unit: unit
-    integration: integration
-
-[testenv]
-deps = coverage
-changedir = {toxinidir}/tests
-setenv =
-    integration: TORBA_LEDGER={envname}
-commands =
-    unit: coverage run -p --source={envsitepackagesdir}/torba -m unittest discover -vv -t . client_tests.unit
-    integration: orchstr8 download
-    integration: coverage run -p --source={envsitepackagesdir}/torba -m unittest discover -vv -t . client_tests.integration
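One last piece worth preserving for reference: the removed workbench posted its commands (generate, transfer, start) as form-encoded HTTP bodies built by dict_to_post_data. That helper, copied verbatim from the removed application.py, can be exercised standalone:

    from PySide2 import QtCore

    def dict_to_post_data(d):
        # Copied from the removed application.py: form-encodes a dict for POST.
        query = QtCore.QUrlQuery()
        for key, value in d.items():
            query.addQueryItem(str(key), str(value))
        return QtCore.QByteArray(query.toString().encode())

    print(dict_to_post_data({'blocks': 5}).data())  # b'blocks=5'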