import time
import asyncio
import typing
import struct
from bisect import bisect_right
from struct import pack, unpack
from concurrent.futures.thread import ThreadPoolExecutor
from typing import Optional, List, Tuple, Set, DefaultDict, Dict, NamedTuple
from prometheus_client import Gauge, Histogram
from collections import defaultdict
import array
import lbry
from lbry.schema.claim import Claim
from lbry.schema.mime_types import guess_stream_type
from lbry.wallet.ledger import Ledger, TestNetLedger, RegTestLedger
from lbry.wallet.constants import TXO_TYPES
from lbry.wallet.server.db.common import STREAM_TYPES, CLAIM_TYPES
from lbry.wallet.transaction import OutputScript, Output, Transaction
from lbry.wallet.server.tx import Tx, TxOutput, TxInput
from lbry.wallet.server.daemon import DaemonError
from lbry.wallet.server.hash import hash_to_hex_str, HASHX_LEN
from lbry.wallet.server.util import chunks, class_logger
from lbry.crypto.hash import hash160
from lbry.wallet.server.leveldb import FlushData
from lbry.wallet.server.mempool import MemPool
from lbry.wallet.server.db.claimtrie import StagedClaimtrieItem, StagedClaimtrieSupport
from lbry.wallet.server.db.claimtrie import get_takeover_name_ops, StagedActivation, get_add_effective_amount_ops
from lbry.wallet.server.db.claimtrie import get_remove_name_ops, get_remove_effective_amount_ops
from lbry.wallet.server.db.prefixes import ACTIVATED_SUPPORT_TXO_TYPE, ACTIVATED_CLAIM_TXO_TYPE
from lbry.wallet.server.db.prefixes import PendingActivationKey, PendingActivationValue, Prefixes, ClaimToTXOValue
from lbry.wallet.server.udp import StatusServer
from lbry.wallet.server.db.revertable import RevertableOp, RevertablePut, RevertableDelete, RevertableOpStack

if typing.TYPE_CHECKING:
    from lbry.wallet.server.leveldb import LevelDB


class TrendingNotification(NamedTuple):
    height: int
    added: bool
    prev_amount: int
    new_amount: int
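
# Illustrative sketch (not part of the original file): a TrendingNotification
# records how the staked amount behind a claim changed at a given height, for
# example when a support is added to a claim that previously had 1 LBC staked:
#   TrendingNotification(height=1000, added=True,
#                        prev_amount=100_000_000, new_amount=300_000_000)
# The amounts here assume dewies (10**8 dewies per LBC), matching how amounts
# are handled elsewhere in the wallet server.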


class Prefetcher:
    """Prefetches blocks (in the forward direction only)."""

    def __init__(self, daemon, coin, blocks_event):
        self.logger = class_logger(__name__, self.__class__.__name__)
        self.daemon = daemon
        self.coin = coin
        self.blocks_event = blocks_event
        self.blocks = []
        self.caught_up = False
        # Access to fetched_height should be protected by the semaphore
        self.fetched_height = None
        self.semaphore = asyncio.Semaphore()
        self.refill_event = asyncio.Event()
        # The prefetched block cache size. The min cache size has
        # little effect on sync time.
        self.cache_size = 0
        self.min_cache_size = 10 * 1024 * 1024
        # This makes the first fetch be 10 blocks
        self.ave_size = self.min_cache_size // 10
        self.polling_delay = 5

    async def main_loop(self, bp_height):
        """Loop forever polling for more blocks."""
        await self.reset_height(bp_height)
        while True:
            try:
                # Sleep a while if there is nothing to prefetch
                await self.refill_event.wait()
                if not await self._prefetch_blocks():
                    await asyncio.sleep(self.polling_delay)
            except DaemonError as e:
                self.logger.info(f'ignoring daemon error: {e}')

    def get_prefetched_blocks(self):
        """Called by block processor when it is processing queued blocks."""
        blocks = self.blocks
        self.blocks = []
        self.cache_size = 0
        self.refill_event.set()
        return blocks

    async def reset_height(self, height):
        """Reset to prefetch blocks from the block processor's height.

        Used in blockchain reorganisations. This coroutine can be
        called asynchronously to the _prefetch_blocks coroutine so we
        must synchronize with a semaphore.
        """
        async with self.semaphore:
            self.blocks.clear()
            self.cache_size = 0
            self.fetched_height = height
            self.refill_event.set()

        daemon_height = await self.daemon.height()
        behind = daemon_height - height
        if behind > 0:
            self.logger.info(f'catching up to daemon height {daemon_height:,d} '
                             f'({behind:,d} blocks behind)')
        else:
            self.logger.info(f'caught up to daemon height {daemon_height:,d}')

    async def _prefetch_blocks(self):
        """Prefetch some blocks and put them on the queue.

        Repeats until the queue is full or caught up.
        """
        daemon = self.daemon
        daemon_height = await daemon.height()
        async with self.semaphore:
            while self.cache_size < self.min_cache_size:
                # Try and catch up all blocks but limit to room in cache.
                # Constrain fetch count to between 0 and 500 regardless;
                # testnet can be lumpy.
                cache_room = self.min_cache_size // self.ave_size
                count = min(daemon_height - self.fetched_height, cache_room)
                count = min(500, max(count, 0))
                if not count:
                    self.caught_up = True
                    return False

                first = self.fetched_height + 1
                hex_hashes = await daemon.block_hex_hashes(first, count)
                if self.caught_up:
                    self.logger.info('new block height {:,d} hash {}'
                                     .format(first + count - 1, hex_hashes[-1]))
                blocks = await daemon.raw_blocks(hex_hashes)

                assert count == len(blocks)

                # Special handling for genesis block
                if first == 0:
                    blocks[0] = self.coin.genesis_block(blocks[0])
                    self.logger.info(f'verified genesis block with hash {hex_hashes[0]}')

                # Update our recent average block size estimate
                size = sum(len(block) for block in blocks)
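                # Worked example of the blend below (not in the original): when
                # fewer than 10 blocks were fetched, the fresh sizes are mixed
                # with the old estimate as if 10 blocks had been sampled, e.g.
                # count=4, size=400_000, ave_size=50_000 gives
                # (400_000 + 6 * 50_000) // 10 == 70_000.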
                if count >= 10:
                    self.ave_size = size // count
                else:
                    self.ave_size = (size + (10 - count) * self.ave_size) // 10

                self.blocks.extend(blocks)
                self.cache_size += size
                self.fetched_height += count
                self.blocks_event.set()

        self.refill_event.clear()
        return True


class ChainError(Exception):
    """Raised on error processing blocks."""


NAMESPACE = "wallet_server"
HISTOGRAM_BUCKETS = (
    .005, .01, .025, .05, .075, .1, .25, .5, .75, 1.0, 2.5, 5.0, 7.5, 10.0, 15.0, 20.0, 30.0, 60.0, float('inf')
)


class BlockProcessor:
    """Process blocks and update the DB state to match.

    Employ a prefetcher to prefetch blocks in batches for processing.
    Coordinate backing up in case of chain reorganisations.
    """

    block_count_metric = Gauge(
        "block_count", "Number of processed blocks", namespace=NAMESPACE
    )
    block_update_time_metric = Histogram(
        "block_time", "Block update times", namespace=NAMESPACE, buckets=HISTOGRAM_BUCKETS
    )
    reorg_count_metric = Gauge(
        "reorg_count", "Number of reorgs", namespace=NAMESPACE
    )

    def __init__(self, env, db: 'LevelDB', daemon, shutdown_event: asyncio.Event):
        self.state_lock = asyncio.Lock()
        self.env = env
        self.db = db
        self.daemon = daemon
        self.mempool = MemPool(env.coin, daemon, db, self.state_lock)
        self.shutdown_event = shutdown_event
        self.coin = env.coin
        if env.coin.NET == 'mainnet':
            self.ledger = Ledger
        elif env.coin.NET == 'testnet':
            self.ledger = TestNetLedger
        else:
            self.ledger = RegTestLedger

        self._caught_up_event: Optional[asyncio.Event] = None
        self.height = 0
        self.tip = bytes.fromhex(self.coin.GENESIS_HASH)[::-1]
        self.tx_count = 0

        self.blocks_event = asyncio.Event()
        self.prefetcher = Prefetcher(daemon, env.coin, self.blocks_event)
        self.logger = class_logger(__name__, self.__class__.__name__)

        # Meta
        self.touched_hashXs: Set[bytes] = set()

        # UTXO cache
        self.utxo_cache: Dict[Tuple[bytes, int], Tuple[bytes, int]] = {}

        # Claimtrie cache
        self.db_op_stack: Optional[RevertableOpStack] = None

        # self.search_cache = {}
        self.history_cache = {}
        self.status_server = StatusServer()

        #################################
        # attributes used for calculating stake activations and takeovers per block
        #################################

        # txo to pending claim
        self.txo_to_claim: Dict[Tuple[int, int], StagedClaimtrieItem] = {}
        # claim hash to pending claim txo
        self.claim_hash_to_txo: Dict[bytes, Tuple[int, int]] = {}
        # claim hash to lists of pending support txos
        self.support_txos_by_claim: DefaultDict[bytes, List[Tuple[int, int]]] = defaultdict(list)
        # support txo: (supported claim hash, support amount)
        self.support_txo_to_claim: Dict[Tuple[int, int], Tuple[bytes, int]] = {}
        # removed supports {name: {claim_hash: [(tx_num, nout), ...]}}
        self.removed_support_txos_by_name_by_claim: DefaultDict[str, DefaultDict[bytes, List[Tuple[int, int]]]] = \
            defaultdict(lambda: defaultdict(list))
        self.abandoned_claims: Dict[bytes, StagedClaimtrieItem] = {}
        # removed activated support amounts by claim hash
        self.removed_active_support_amount_by_claim: DefaultDict[bytes, List[int]] = defaultdict(list)
        # pending activated support amounts by claim hash
        self.activated_support_amount_by_claim: DefaultDict[bytes, List[int]] = defaultdict(list)
        # pending activated name and claim hash to claim/update txo amount
        self.activated_claim_amount_by_name_and_hash: Dict[Tuple[str, bytes], int] = {}
        # pending claim and support activations per claim hash per name,
        # used to process takeovers due to added activations
        activation_by_claim_by_name_type = DefaultDict[str, DefaultDict[bytes, List[Tuple[PendingActivationKey, int]]]]
        self.activation_by_claim_by_name: activation_by_claim_by_name_type = defaultdict(lambda: defaultdict(list))
        # these are used for detecting early takeovers by not yet activated claims/supports
        self.possible_future_support_amounts_by_claim_hash: DefaultDict[bytes, List[int]] = defaultdict(list)
        self.possible_future_claim_amount_by_name_and_hash: Dict[Tuple[str, bytes], int] = {}
        self.possible_future_support_txos_by_claim_hash: DefaultDict[bytes, List[Tuple[int, int]]] = defaultdict(list)

        self.removed_claims_to_send_es = set()  # cumulative changes across blocks to send ES
        self.touched_claims_to_send_es = set()
        self.activation_info_to_send_es: DefaultDict[str, List[TrendingNotification]] = defaultdict(list)

        self.removed_claim_hashes: Set[bytes] = set()  # per block changes
        self.touched_claim_hashes: Set[bytes] = set()

        self.signatures_changed = set()

        self.pending_reposted = set()
        self.pending_channel_counts = defaultdict(lambda: 0)

        self.pending_channels = {}
        self.amount_cache = {}
        self.expired_claim_hashes: Set[bytes] = set()

        self.doesnt_have_valid_signature: Set[bytes] = set()
        self.claim_channels: Dict[bytes, bytes] = {}
        self.hashXs_by_tx: DefaultDict[bytes, List[int]] = defaultdict(list)

    async def claim_producer(self):
        if self.db.db_height <= 1:
            return

        for claim_hash in self.removed_claims_to_send_es:
            yield 'delete', claim_hash.hex()
        async for claim in self.db.claims_producer(self.touched_claims_to_send_es):
            yield 'update', claim
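
    # Illustrative note (not in the original): claim_producer() is an async
    # generator of ('delete', claim_hash_hex) and ('update', claim_document)
    # tuples; a consumer such as the ES sync can drain it roughly like:
    #   async for op, doc in self.claim_producer():
    #       ...  # index or delete depending on op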

    async def run_in_thread_with_lock(self, func, *args):
        # Run in a thread to prevent blocking. Shielded so that
        # cancellations from shutdown don't lose work - when the task
        # completes the data will be flushed and then we shut down.
        # Take the state lock to be certain in-memory state is
        # consistent and not being updated elsewhere.
        async def run_in_thread_locked():
            async with self.state_lock:
                return await asyncio.get_event_loop().run_in_executor(None, func, *args)
        return await asyncio.shield(run_in_thread_locked())

    @staticmethod
    async def run_in_thread(func, *args):
        async def run_in_thread():
            return await asyncio.get_event_loop().run_in_executor(None, func, *args)
        return await asyncio.shield(run_in_thread())
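
    # Usage sketch (not in the original): both helpers offload a blocking
    # callable to the default executor while shielding it from cancellation,
    # e.g. `await self.run_in_thread(self.advance_block, block)`; the locked
    # variant additionally holds self.state_lock so in-memory state cannot be
    # mutated elsewhere while the thread runs.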

    async def check_and_advance_blocks(self, raw_blocks):
        """Process the list of raw blocks passed. Detects and handles
        reorgs.
        """

        if not raw_blocks:
            return
        first = self.height + 1
        blocks = [self.coin.block(raw_block, first + n)
                  for n, raw_block in enumerate(raw_blocks)]
        headers = [block.header for block in blocks]
        hprevs = [self.coin.header_prevhash(h) for h in headers]
        chain = [self.tip] + [self.coin.header_hash(h) for h in headers[:-1]]
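        # Linkage check sketch (not in the original): each header must
        # reference the hash of its predecessor, so for incoming blocks
        # [b1, b2, b3] on tip T we require
        #   [prevhash(b1), prevhash(b2), prevhash(b3)] == [T, hash(b1), hash(b2)]
        # and a mismatch at position 0 means our tip was reorged away.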

        if hprevs == chain:
            total_start = time.perf_counter()
            try:
                for block in blocks:
                    start = time.perf_counter()
                    await self.run_in_thread(self.advance_block, block)
                    await self.flush()

                    self.logger.info("advanced to %i in %0.3fs", self.height, time.perf_counter() - start)
                    if self.height == self.coin.nExtendedClaimExpirationForkHeight:
                        self.logger.warning(
                            "applying extended claim expiration fork on claims accepted by %i", self.height
                        )
                        await self.run_in_thread(self.db.apply_expiration_extension_fork)
                    # TODO: we shouldn't wait on the search index updating before advancing to the next block
                    if not self.db.first_sync:
                        self.db.reload_blocking_filtering_streams()
                        await self.db.search_index.claim_consumer(self.claim_producer())
                        await self.db.search_index.apply_filters(self.db.blocked_streams, self.db.blocked_channels,
                                                                 self.db.filtered_streams, self.db.filtered_channels)
                        await self.db.search_index.update_trending_score(self.activation_info_to_send_es)
                        self.db.search_index.clear_caches()
                        self.touched_claims_to_send_es.clear()
                        self.removed_claims_to_send_es.clear()
                        self.activation_info_to_send_es.clear()
                    # print("******************\n")
            except:
                self.logger.exception("advance blocks failed")
                raise
            processed_time = time.perf_counter() - total_start
            self.block_count_metric.set(self.height)
            self.block_update_time_metric.observe(processed_time)
            self.status_server.set_height(self.db.fs_height, self.db.db_tip)
            if not self.db.first_sync:
                s = '' if len(blocks) == 1 else 's'
                self.logger.info('processed {:,d} block{} in {:.1f}s'.format(len(blocks), s, processed_time))
            if self._caught_up_event.is_set():
                await self.mempool.on_block(self.touched_hashXs, self.height)
            self.touched_hashXs.clear()
        elif hprevs[0] != chain[0]:
            min_start_height = max(self.height - self.coin.REORG_LIMIT, 0)
            count = 1
            block_hashes_from_lbrycrd = await self.daemon.block_hex_hashes(
                min_start_height, self.coin.REORG_LIMIT
            )
            for height, block_hash in zip(
                    reversed(range(min_start_height, min_start_height + self.coin.REORG_LIMIT)),
                    reversed(block_hashes_from_lbrycrd)):
                if self.db.get_block_hash(height)[::-1].hex() == block_hash:
                    break
                count += 1
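            # Walk-back sketch (not in the original): compare the daemon's
            # block hash at each height, from the tip downwards, against the
            # hash stored in our db; the first height that matches is the
            # common ancestor and `count` is how many blocks to unwind.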
            self.logger.warning(f"blockchain reorg detected at {self.height}, unwinding last {count} blocks")
            try:
                assert count > 0, count
                for _ in range(count):
                    await self.backup_block()
                    await self.flush()
                    self.logger.info(f'backed up to height {self.height:,d}')

                    await self.db._read_claim_txos()  # TODO: don't do this
                    for touched in self.touched_claims_to_send_es:
                        if not self.db.get_claim_txo(touched):
                            self.removed_claims_to_send_es.add(touched)
                    self.touched_claims_to_send_es.difference_update(self.removed_claims_to_send_es)
                    await self.db.search_index.claim_consumer(self.claim_producer())
                    self.db.search_index.clear_caches()
                    self.touched_claims_to_send_es.clear()
                    self.removed_claims_to_send_es.clear()
                    self.activation_info_to_send_es.clear()
                await self.prefetcher.reset_height(self.height)
                self.reorg_count_metric.inc()
            except:
                self.logger.exception("reorg blocks failed")
                raise
            finally:
                self.logger.info("backed up to block %i", self.height)
        else:
            # It is probably possible but extremely rare that what
            # bitcoind returns doesn't form a chain because it
            # reorg-ed the chain as it was processing the batched
            # block hash requests. Should this happen it's simplest
            # just to reset the prefetcher and try again.
            self.logger.warning('daemon blocks do not form a chain; '
                                'resetting the prefetcher')
            await self.prefetcher.reset_height(self.height)

    # - Flushing
    def flush_data(self):
        """The data for a flush. The lock must be taken."""
        assert self.state_lock.locked()
        return FlushData(self.height, self.tx_count, self.db_op_stack, self.tip)

    async def flush(self):
        def flush():
            self.db.flush_dbs(self.flush_data())
            self.clear_after_advance_or_reorg()
        await self.run_in_thread_with_lock(flush)

    async def write_state(self):
        def flush():
            with self.db.db.write_batch(transaction=True) as batch:
                self.db.write_db_state(batch)

        await self.run_in_thread_with_lock(flush)

    def _add_claim_or_update(self, height: int, txo: 'Output', tx_hash: bytes, tx_num: int, nout: int,
                             spent_claims: typing.Dict[bytes, typing.Tuple[int, int, str]]):
        try:
            claim_name = txo.script.values['claim_name'].decode()
        except UnicodeDecodeError:
            claim_name = ''.join(chr(c) for c in txo.script.values['claim_name'])
        try:
            normalized_name = txo.normalized_name
        except UnicodeDecodeError:
            normalized_name = claim_name
        if txo.script.is_claim_name:
            claim_hash = hash160(tx_hash + pack('>I', nout))[::-1]
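            # Derivation note (not in the original): a new claim's hash is
            # hash160 (RIPEMD-160 of SHA-256) over the txid concatenated with
            # the output index packed as a 4-byte big-endian integer, then
            # byte-reversed like the other claim hashes handled in this file.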
            # print(f"\tnew {claim_hash.hex()} ({tx_num} {txo.amount})")
        else:
            claim_hash = txo.claim_hash[::-1]
            # print(f"\tupdate {claim_hash.hex()} ({tx_num} {txo.amount})")

        signing_channel_hash = None
        channel_signature_is_valid = False
        try:
            signable = txo.signable
            is_repost = txo.claim.is_repost
            is_channel = txo.claim.is_channel
            if txo.claim.is_signed:
                signing_channel_hash = txo.signable.signing_channel_hash[::-1]
        except:  # google.protobuf.message.DecodeError: Could not parse JSON.
            signable = None
            is_repost = False
            is_channel = False

        reposted_claim_hash = None

        if is_repost:
            reposted_claim_hash = txo.claim.repost.reference.claim_hash[::-1]
            self.pending_reposted.add(reposted_claim_hash)

        if is_channel:
            self.pending_channels[claim_hash] = txo.claim.channel.public_key_bytes

        self.doesnt_have_valid_signature.add(claim_hash)
        raw_channel_tx = None
        if signable and signable.signing_channel_hash:
            signing_channel = self.db.get_claim_txo(signing_channel_hash)

            if signing_channel:
                raw_channel_tx = self.db.prefix_db.tx.get(self.db.total_transactions[signing_channel.tx_num]).raw_tx
            channel_pub_key_bytes = None
            try:
                if not signing_channel:
                    if txo.signable.signing_channel_hash[::-1] in self.pending_channels:
                        channel_pub_key_bytes = self.pending_channels[signing_channel_hash]
                elif raw_channel_tx:
                    chan_output = self.coin.transaction(raw_channel_tx).outputs[signing_channel.position]
                    chan_script = OutputScript(chan_output.pk_script)
                    chan_script.parse()
                    channel_meta = Claim.from_bytes(chan_script.values['claim'])

                    channel_pub_key_bytes = channel_meta.channel.public_key_bytes
                if channel_pub_key_bytes:
                    channel_signature_is_valid = Output.is_signature_valid(
                        txo.get_encoded_signature(), txo.get_signature_digest(self.ledger), channel_pub_key_bytes
                    )
                    if channel_signature_is_valid:
                        self.pending_channel_counts[signing_channel_hash] += 1
                        self.doesnt_have_valid_signature.remove(claim_hash)
                        self.claim_channels[claim_hash] = signing_channel_hash
            except:
                self.logger.exception("error validating channel signature for %s:%i", tx_hash[::-1].hex(), nout)

        if txo.script.is_claim_name:  # it's a root claim
            root_tx_num, root_idx = tx_num, nout
            previous_amount = 0
        else:  # it's a claim update
            if claim_hash not in spent_claims:
                # print(f"\tthis is a wonky tx, contains unlinked claim update {claim_hash.hex()}")
                return
            if normalized_name != spent_claims[claim_hash][2]:
                self.logger.warning(
                    f"{tx_hash[::-1].hex()} contains mismatched name for claim update {claim_hash.hex()}"
                )
                return
            (prev_tx_num, prev_idx, _) = spent_claims.pop(claim_hash)
            # print(f"\tupdate {claim_hash.hex()} {tx_hash[::-1].hex()} {txo.amount}")
            if (prev_tx_num, prev_idx) in self.txo_to_claim:
                previous_claim = self.txo_to_claim.pop((prev_tx_num, prev_idx))
                self.claim_hash_to_txo.pop(claim_hash)
                root_tx_num, root_idx = previous_claim.root_tx_num, previous_claim.root_position
            else:
                previous_claim = self._make_pending_claim_txo(claim_hash)
                root_tx_num, root_idx = previous_claim.root_tx_num, previous_claim.root_position
                activation = self.db.get_activation(prev_tx_num, prev_idx)
                claim_name = previous_claim.name
                self.db_op_stack.extend_ops(
                    StagedActivation(
                        ACTIVATED_CLAIM_TXO_TYPE, claim_hash, prev_tx_num, prev_idx, activation, normalized_name,
                        previous_claim.amount
                    ).get_remove_activate_ops()
                )
            previous_amount = previous_claim.amount

        self.db.claim_to_txo[claim_hash] = ClaimToTXOValue(
            tx_num, nout, root_tx_num, root_idx, txo.amount, channel_signature_is_valid, claim_name
        )
        self.db.txo_to_claim[(tx_num, nout)] = claim_hash

        pending = StagedClaimtrieItem(
            claim_name, normalized_name, claim_hash, txo.amount, self.coin.get_expiration_height(height), tx_num, nout,
            root_tx_num, root_idx, channel_signature_is_valid, signing_channel_hash, reposted_claim_hash
        )
        self.txo_to_claim[(tx_num, nout)] = pending
        self.claim_hash_to_txo[claim_hash] = (tx_num, nout)
        self.db_op_stack.extend_ops(pending.get_add_claim_utxo_ops())

    def _add_support(self, height: int, txo: 'Output', tx_num: int, nout: int):
        supported_claim_hash = txo.claim_hash[::-1]
        self.support_txos_by_claim[supported_claim_hash].append((tx_num, nout))
        self.support_txo_to_claim[(tx_num, nout)] = supported_claim_hash, txo.amount
        # print(f"\tsupport claim {supported_claim_hash.hex()} +{txo.amount}")
        self.db_op_stack.extend_ops(StagedClaimtrieSupport(
            supported_claim_hash, tx_num, nout, txo.amount
        ).get_add_support_utxo_ops())

    def _add_claim_or_support(self, height: int, tx_hash: bytes, tx_num: int, nout: int, txo: 'Output',
                              spent_claims: typing.Dict[bytes, Tuple[int, int, str]]):
        if txo.script.is_claim_name or txo.script.is_update_claim:
            self._add_claim_or_update(height, txo, tx_hash, tx_num, nout, spent_claims)
        elif txo.script.is_support_claim or txo.script.is_support_claim_data:
            self._add_support(height, txo, tx_num, nout)

    def _spend_support_txo(self, height: int, txin: TxInput):
        txin_num = self.db.transaction_num_mapping[txin.prev_hash]
        activation = 0
        if (txin_num, txin.prev_idx) in self.support_txo_to_claim:
            spent_support, support_amount = self.support_txo_to_claim.pop((txin_num, txin.prev_idx))
            self.support_txos_by_claim[spent_support].remove((txin_num, txin.prev_idx))
            supported_name = self._get_pending_claim_name(spent_support)
            self.removed_support_txos_by_name_by_claim[supported_name][spent_support].append((txin_num, txin.prev_idx))
        else:
            spent_support, support_amount = self.db.get_supported_claim_from_txo(txin_num, txin.prev_idx)
            if not spent_support:  # it is not a support
                return
            supported_name = self._get_pending_claim_name(spent_support)
            if supported_name is not None:
                self.removed_support_txos_by_name_by_claim[supported_name][spent_support].append(
                    (txin_num, txin.prev_idx))
            activation = self.db.get_activation(txin_num, txin.prev_idx, is_support=True)
            if 0 < activation < self.height + 1:
                self.removed_active_support_amount_by_claim[spent_support].append(support_amount)
            if supported_name is not None and activation > 0:
                self.db_op_stack.extend_ops(StagedActivation(
                    ACTIVATED_SUPPORT_TXO_TYPE, spent_support, txin_num, txin.prev_idx, activation, supported_name,
                    support_amount
                ).get_remove_activate_ops())
        # print(f"\tspent support for {spent_support.hex()} activation:{activation} {support_amount}")
        self.db_op_stack.extend_ops(StagedClaimtrieSupport(
            spent_support, txin_num, txin.prev_idx, support_amount
        ).get_spend_support_txo_ops())

    def _spend_claim_txo(self, txin: TxInput, spent_claims: Dict[bytes, Tuple[int, int, str]]) -> bool:
        txin_num = self.db.transaction_num_mapping[txin.prev_hash]
        if (txin_num, txin.prev_idx) in self.txo_to_claim:
            spent = self.txo_to_claim[(txin_num, txin.prev_idx)]
        else:
            if (txin_num, txin.prev_idx) not in self.db.txo_to_claim:  # txo is not a claim
                return False
            spent_claim_hash_and_name = self.db.get_claim_from_txo(
                txin_num, txin.prev_idx
            )
            assert spent_claim_hash_and_name is not None
            spent = self._make_pending_claim_txo(spent_claim_hash_and_name.claim_hash)
        self.db.claim_to_txo.pop(self.db.txo_to_claim.pop((txin_num, txin.prev_idx)))
        if spent.reposted_claim_hash:
            self.pending_reposted.add(spent.reposted_claim_hash)
        if spent.signing_hash and spent.channel_signature_is_valid:
            self.pending_channel_counts[spent.signing_hash] -= 1
        spent_claims[spent.claim_hash] = (spent.tx_num, spent.position, spent.normalized_name)
        # print(f"\tspend lbry://{spent.name}#{spent.claim_hash.hex()}")
        self.db_op_stack.extend_ops(spent.get_spend_claim_txo_ops())
        return True

    def _spend_claim_or_support_txo(self, height: int, txin: TxInput, spent_claims):
        if not self._spend_claim_txo(txin, spent_claims):
            self._spend_support_txo(height, txin)

    def _abandon_claim(self, claim_hash: bytes, tx_num: int, nout: int, normalized_name: str):
        if (tx_num, nout) in self.txo_to_claim:
            pending = self.txo_to_claim.pop((tx_num, nout))
            self.claim_hash_to_txo.pop(claim_hash)
            self.abandoned_claims[pending.claim_hash] = pending
            claim_root_tx_num, claim_root_idx = pending.root_tx_num, pending.root_position
            prev_amount, prev_signing_hash = pending.amount, pending.signing_hash
            reposted_claim_hash, name = pending.reposted_claim_hash, pending.name
            expiration = self.coin.get_expiration_height(self.height)
            signature_is_valid = pending.channel_signature_is_valid
        else:
            v = self.db.get_claim_txo(
                claim_hash
            )
            claim_root_tx_num, claim_root_idx, prev_amount = v.root_tx_num, v.root_position, v.amount
            signature_is_valid, name = v.channel_signature_is_valid, v.name
            prev_signing_hash = self.db.get_channel_for_claim(claim_hash, tx_num, nout)
            reposted_claim_hash = self.db.get_repost(claim_hash)
            expiration = self.coin.get_expiration_height(bisect_right(self.db.tx_counts, tx_num))
        self.abandoned_claims[claim_hash] = staged = StagedClaimtrieItem(
            name, normalized_name, claim_hash, prev_amount, expiration, tx_num, nout, claim_root_tx_num,
            claim_root_idx, signature_is_valid, prev_signing_hash, reposted_claim_hash
        )
        if prev_signing_hash and prev_signing_hash in self.pending_channel_counts:
            self.pending_channel_counts.pop(prev_signing_hash)

        for support_txo_to_clear in self.support_txos_by_claim[claim_hash]:
            self.support_txo_to_claim.pop(support_txo_to_clear)
        self.support_txos_by_claim[claim_hash].clear()
        self.support_txos_by_claim.pop(claim_hash)
        if normalized_name.startswith('@'):  # abandon a channel, invalidate signatures
            self._invalidate_channel_signatures(claim_hash)

    def _invalidate_channel_signatures(self, claim_hash: bytes):
        for k, signed_claim_hash in self.db.db.iterator(
                prefix=Prefixes.channel_to_claim.pack_partial_key(claim_hash)):
            if signed_claim_hash in self.abandoned_claims or signed_claim_hash in self.expired_claim_hashes:
                continue
            # there is no longer a signing channel for this claim as of this block
            if signed_claim_hash in self.doesnt_have_valid_signature:
                continue
            # the signing channel changed in this block
            if signed_claim_hash in self.claim_channels and claim_hash != self.claim_channels[signed_claim_hash]:
                continue

            # if the claim with an invalidated signature is in this block, update the StagedClaimtrieItem
            # so that if we later try to spend it in this block we won't try to delete the channel info twice
            if signed_claim_hash in self.claim_hash_to_txo:
                signed_claim_txo = self.claim_hash_to_txo[signed_claim_hash]
                claim = self.txo_to_claim[signed_claim_txo]
                if claim.signing_hash != claim_hash:  # claim was already invalidated this block
                    continue
                self.txo_to_claim[signed_claim_txo] = claim.invalidate_signature()
            else:
                claim = self._make_pending_claim_txo(signed_claim_hash)
            self.signatures_changed.add(signed_claim_hash)
            self.pending_channel_counts[claim_hash] -= 1
            self.db_op_stack.extend_ops(claim.get_invalidate_signature_ops())

        for staged in list(self.txo_to_claim.values()):
            needs_invalidate = staged.claim_hash not in self.doesnt_have_valid_signature
            if staged.signing_hash == claim_hash and needs_invalidate:
                self.db_op_stack.extend_ops(staged.get_invalidate_signature_ops())
                self.txo_to_claim[self.claim_hash_to_txo[staged.claim_hash]] = staged.invalidate_signature()
                self.signatures_changed.add(staged.claim_hash)
                self.pending_channel_counts[claim_hash] -= 1

    def _make_pending_claim_txo(self, claim_hash: bytes):
        claim = self.db.get_claim_txo(claim_hash)
        if claim_hash in self.doesnt_have_valid_signature:
            signing_hash = None
        else:
            signing_hash = self.db.get_channel_for_claim(claim_hash, claim.tx_num, claim.position)
        reposted_claim_hash = self.db.get_repost(claim_hash)
        return StagedClaimtrieItem(
            claim.name, claim.normalized_name, claim_hash, claim.amount,
            self.coin.get_expiration_height(
                bisect_right(self.db.tx_counts, claim.tx_num),
                extended=self.height >= self.coin.nExtendedClaimExpirationForkHeight
            ),
            claim.tx_num, claim.position, claim.root_tx_num, claim.root_position,
            claim.channel_signature_is_valid, signing_hash, reposted_claim_hash
        )

    def _expire_claims(self, height: int):
        expired = self.db.get_expired_by_height(height)
        self.expired_claim_hashes.update(set(expired.keys()))
        spent_claims = {}
        for expired_claim_hash, (tx_num, position, name, txi) in expired.items():
            if (tx_num, position) not in self.txo_to_claim:
                self._spend_claim_txo(txi, spent_claims)
        if expired:
            # abandon the channels last to handle abandoned signed claims in the same tx,
            # see test_abandon_channel_and_claims_in_same_tx
            expired_channels = {}
            for abandoned_claim_hash, (tx_num, nout, normalized_name) in spent_claims.items():
                self._abandon_claim(abandoned_claim_hash, tx_num, nout, normalized_name)

                if normalized_name.startswith('@'):
                    expired_channels[abandoned_claim_hash] = (tx_num, nout, normalized_name)
                else:
                    # print(f"\texpire {abandoned_claim_hash.hex()} {tx_num} {nout}")
                    self._abandon_claim(abandoned_claim_hash, tx_num, nout, normalized_name)

            # do this to follow the same content claim removing pathway as if a claim (possible channel) was abandoned
            for abandoned_claim_hash, (tx_num, nout, normalized_name) in expired_channels.items():
                # print(f"\texpire {abandoned_claim_hash.hex()} {tx_num} {nout}")
                self._abandon_claim(abandoned_claim_hash, tx_num, nout, normalized_name)

    def _cached_get_active_amount(self, claim_hash: bytes, txo_type: int, height: int) -> int:
        if (claim_hash, txo_type, height) in self.amount_cache:
            return self.amount_cache[(claim_hash, txo_type, height)]
        if txo_type == ACTIVATED_CLAIM_TXO_TYPE:
            if claim_hash in self.claim_hash_to_txo:
                amount = self.txo_to_claim[self.claim_hash_to_txo[claim_hash]].amount
            else:
                amount = self.db.get_active_amount_as_of_height(
                    claim_hash, height
                )
            self.amount_cache[(claim_hash, txo_type, height)] = amount
        else:
            self.amount_cache[(claim_hash, txo_type, height)] = amount = self.db._get_active_amount(
                claim_hash, txo_type, height
            )
        return amount

    def _get_pending_claim_amount(self, name: str, claim_hash: bytes, height=None) -> int:
        if (name, claim_hash) in self.activated_claim_amount_by_name_and_hash:
            return self.activated_claim_amount_by_name_and_hash[(name, claim_hash)]
        if (name, claim_hash) in self.possible_future_claim_amount_by_name_and_hash:
            return self.possible_future_claim_amount_by_name_and_hash[(name, claim_hash)]
        return self._cached_get_active_amount(claim_hash, ACTIVATED_CLAIM_TXO_TYPE, height or (self.height + 1))

    def _get_pending_claim_name(self, claim_hash: bytes) -> Optional[str]:
        assert claim_hash is not None
        if claim_hash in self.claim_hash_to_txo:
            return self.txo_to_claim[self.claim_hash_to_txo[claim_hash]].normalized_name
        claim_info = self.db.get_claim_txo(claim_hash)
        if claim_info:
            return claim_info.normalized_name

    def _get_pending_supported_amount(self, claim_hash: bytes, height: Optional[int] = None) -> int:
        amount = self._cached_get_active_amount(claim_hash, ACTIVATED_SUPPORT_TXO_TYPE, height or (self.height + 1))
        if claim_hash in self.activated_support_amount_by_claim:
            amount += sum(self.activated_support_amount_by_claim[claim_hash])
        if claim_hash in self.possible_future_support_amounts_by_claim_hash:
            amount += sum(self.possible_future_support_amounts_by_claim_hash[claim_hash])
        if claim_hash in self.removed_active_support_amount_by_claim:
            return amount - sum(self.removed_active_support_amount_by_claim[claim_hash])
        return amount

    def _get_pending_effective_amount(self, name: str, claim_hash: bytes, height: Optional[int] = None) -> int:
        claim_amount = self._get_pending_claim_amount(name, claim_hash, height=height)
        support_amount = self._get_pending_supported_amount(claim_hash, height=height)
        return claim_amount + support_amount
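
    # Formula note (not in the original): the pending effective amount is the
    # claim's own pending amount plus the sum of its pending active support
    # amounts, e.g. a 1 LBC claim with 2 LBC and 0.5 LBC of active supports
    # has an effective amount of 3.5 LBC; takeover decisions compare this
    # quantity between the staged claim and the controlling claim.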

    def _get_takeover_ops(self, height: int):

        # cache for controlling claims as of the previous block
        controlling_claims = {}

        def get_controlling(_name):
            if _name not in controlling_claims:
                _controlling = self.db.get_controlling_claim(_name)
                controlling_claims[_name] = _controlling
            else:
                _controlling = controlling_claims[_name]
            return _controlling

        names_with_abandoned_controlling_claims: List[str] = []

        # get the claims and supports previously scheduled to be activated at this block
        activated_at_height = self.db.get_activated_at_height(height)
        activate_in_future = defaultdict(lambda: defaultdict(list))
        future_activations = defaultdict(dict)

        def get_delayed_activate_ops(name: str, claim_hash: bytes, is_new_claim: bool, tx_num: int, nout: int,
                                     amount: int, is_support: bool) -> List['RevertableOp']:
            controlling = get_controlling(name)
            nothing_is_controlling = not controlling
            staged_is_controlling = False if not controlling else claim_hash == controlling.claim_hash
            controlling_is_abandoned = False if not controlling else \
                controlling.claim_hash in names_with_abandoned_controlling_claims

            if nothing_is_controlling or staged_is_controlling or controlling_is_abandoned:
                delay = 0
            elif is_new_claim:
                delay = self.coin.get_delay_for_name(height - controlling.height)
            else:
                controlling_effective_amount = self._get_pending_effective_amount(name, controlling.claim_hash)
                staged_effective_amount = self._get_pending_effective_amount(name, claim_hash)
                staged_update_could_cause_takeover = staged_effective_amount > controlling_effective_amount
                delay = 0 if not staged_update_could_cause_takeover else self.coin.get_delay_for_name(
                    height - controlling.height
                )
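            # Delay sketch (not in the original): get_delay_for_name grows
            # with how long the current claim has been controlling; on LBRY
            # mainnet it is commonly described as about
            # (height - controlling.height) // 32, capped at 4032 blocks
            # (roughly a week), but the authoritative rule is in the coin
            # class.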
|
|
|
|
if delay == 0: # if delay was 0 it needs to be considered for takeovers
|
|
|
|
activated_at_height[PendingActivationValue(claim_hash, name)].append(
|
|
|
|
PendingActivationKey(
|
|
|
|
height, ACTIVATED_SUPPORT_TXO_TYPE if is_support else ACTIVATED_CLAIM_TXO_TYPE, tx_num, nout
|
|
|
|
)
|
|
|
|
)
|
2021-05-26 23:25:03 +02:00
|
|
|
else: # if the delay was higher if still needs to be considered if something else triggers a takeover
|
|
|
|
activate_in_future[name][claim_hash].append((
|
|
|
|
PendingActivationKey(
|
|
|
|
height + delay, ACTIVATED_SUPPORT_TXO_TYPE if is_support else ACTIVATED_CLAIM_TXO_TYPE,
|
|
|
|
tx_num, nout
|
|
|
|
), amount
|
|
|
|
))
|
|
|
|
if is_support:
|
2021-07-01 23:23:27 +02:00
|
|
|
self.possible_future_support_txos_by_claim_hash[claim_hash].append((tx_num, nout))
|
2021-05-26 00:06:33 +02:00
|
|
|
return StagedActivation(
|
|
|
|
ACTIVATED_SUPPORT_TXO_TYPE if is_support else ACTIVATED_CLAIM_TXO_TYPE, claim_hash, tx_num, nout,
|
|
|
|
height + delay, name, amount
|
|
|
|
).get_activate_ops()
|
|
|
|
|
2021-05-20 19:31:40 +02:00
|
|
|
# determine names needing takeover/deletion due to controlling claims being abandoned
|
|
|
|
# and add ops to deactivate abandoned claims
|
2021-07-01 23:23:27 +02:00
|
|
|
for claim_hash, staged in self.abandoned_claims.items():
|
2021-08-12 22:08:52 +02:00
|
|
|
controlling = get_controlling(staged.normalized_name)
|
2021-05-20 19:31:40 +02:00
|
|
|
if controlling and controlling.claim_hash == claim_hash:
|
2021-08-12 22:08:52 +02:00
|
|
|
names_with_abandoned_controlling_claims.append(staged.normalized_name)
|
2021-06-15 17:53:03 +02:00
|
|
|
# print(f"\t{staged.name} needs takeover")
|
2021-05-20 19:31:40 +02:00
|
|
|
activation = self.db.get_activation(staged.tx_num, staged.position)
|
2021-05-24 18:41:44 +02:00
|
|
|
if activation > 0: # db returns -1 for non-existent txos
|
2021-05-20 19:31:40 +02:00
|
|
|
# removed queued future activation from the db
|
2021-07-24 20:34:03 +02:00
|
|
|
self.db_op_stack.extend_ops(
|
2021-05-20 19:31:40 +02:00
|
|
|
StagedActivation(
|
|
|
|
ACTIVATED_CLAIM_TXO_TYPE, staged.claim_hash, staged.tx_num, staged.position,
|
2021-08-12 22:08:52 +02:00
|
|
|
activation, staged.normalized_name, staged.amount
|
2021-05-20 19:31:40 +02:00
|
|
|
).get_remove_activate_ops()
|
|
|
|
)
|
|
|
|
else:
|
2021-05-24 18:41:44 +02:00
|
|
|
# it hadn't yet been activated
|
2021-05-20 19:31:40 +02:00
|
|
|
pass
|
|
|
|
|
2021-05-24 18:41:44 +02:00
|
|
|
# get the removed activated supports for controlling claims to determine if takeovers are possible
|
|
|
|
abandoned_support_check_need_takeover = defaultdict(list)
|
2021-07-01 23:23:27 +02:00
|
|
|
for claim_hash, amounts in self.removed_active_support_amount_by_claim.items():
|
2021-05-20 19:31:40 +02:00
|
|
|
name = self._get_pending_claim_name(claim_hash)
|
2021-06-06 19:02:52 +02:00
|
|
|
if name is None:
|
|
|
|
continue
|
2021-05-20 19:31:40 +02:00
|
|
|
controlling = get_controlling(name)
|
2021-05-24 18:41:44 +02:00
|
|
|
if controlling and controlling.claim_hash == claim_hash and \
|
|
|
|
name not in names_with_abandoned_controlling_claims:
|
2021-05-20 19:31:40 +02:00
|
|
|
abandoned_support_check_need_takeover[(name, claim_hash)].extend(amounts)
|
|
|
|
|
|
|
|
# prepare to activate or delay activation of the pending claims being added this block
|
2021-07-01 23:23:27 +02:00
|
|
|
for (tx_num, nout), staged in self.txo_to_claim.items():
|
2021-07-24 20:34:03 +02:00
|
|
|
self.db_op_stack.extend_ops(get_delayed_activate_ops(
|
2021-08-12 22:08:52 +02:00
|
|
|
staged.normalized_name, staged.claim_hash, not staged.is_update, tx_num, nout, staged.amount,
|
|
|
|
is_support=False
|
2021-05-26 23:25:03 +02:00
|
|
|
))
|
2021-05-20 19:31:40 +02:00
|
|
|
|
|
|
|
# and the supports
|
2021-07-01 23:23:27 +02:00
|
|
|
for (tx_num, nout), (claim_hash, amount) in self.support_txo_to_claim.items():
|
|
|
|
if claim_hash in self.abandoned_claims:
|
2021-05-20 19:31:40 +02:00
|
|
|
continue
|
2021-07-01 23:23:27 +02:00
|
|
|
elif claim_hash in self.claim_hash_to_txo:
|
2021-08-12 22:08:52 +02:00
|
|
|
name = self.txo_to_claim[self.claim_hash_to_txo[claim_hash]].normalized_name
|
2021-07-01 23:23:27 +02:00
|
|
|
staged_is_new_claim = not self.txo_to_claim[self.claim_hash_to_txo[claim_hash]].is_update
|
2021-05-20 19:31:40 +02:00
|
|
|
else:
|
2021-06-06 19:02:52 +02:00
|
|
|
supported_claim_info = self.db.get_claim_txo(claim_hash)
|
|
|
|
if not supported_claim_info:
|
|
|
|
# the supported claim doesn't exist
|
|
|
|
continue
|
|
|
|
else:
|
2021-06-09 22:29:16 +02:00
|
|
|
v = supported_claim_info
|
2021-08-12 22:08:52 +02:00
|
|
|
name = v.normalized_name
|
2021-06-09 22:29:16 +02:00
|
|
|
staged_is_new_claim = (v.root_tx_num, v.root_position) == (v.tx_num, v.position)
|
2021-07-24 20:34:03 +02:00
|
|
|
self.db_op_stack.extend_ops(get_delayed_activate_ops(
|
2021-05-26 23:25:03 +02:00
|
|
|
name, claim_hash, staged_is_new_claim, tx_num, nout, amount, is_support=True
|
|
|
|
))
|
2021-05-20 19:31:40 +02:00
|
|
|
|
|
|
|
# add the activation/delayed-activation ops
|
|
|
|
        for activated, activated_txos in activated_at_height.items():
            controlling = get_controlling(activated.normalized_name)
            if activated.claim_hash in self.abandoned_claims:
                continue
            reactivate = False
            if not controlling or controlling.claim_hash == activated.claim_hash:
                # there is no delay for claims to a name without a controlling value or to the controlling value
                reactivate = True
            for activated_txo in activated_txos:
                if activated_txo.is_support and (activated_txo.tx_num, activated_txo.position) in \
                        self.removed_support_txos_by_name_by_claim[activated.normalized_name][activated.claim_hash]:
                    # print("\tskip activate support for pending abandoned claim")
                    continue
                if activated_txo.is_claim:
                    txo_type = ACTIVATED_CLAIM_TXO_TYPE
                    txo_tup = (activated_txo.tx_num, activated_txo.position)
                    if txo_tup in self.txo_to_claim:
                        amount = self.txo_to_claim[txo_tup].amount
                    else:
                        amount = self.db.get_claim_txo_amount(
                            activated.claim_hash
                        )
                    self.activated_claim_amount_by_name_and_hash[(activated.normalized_name, activated.claim_hash)] = amount
                else:
                    txo_type = ACTIVATED_SUPPORT_TXO_TYPE
                    txo_tup = (activated_txo.tx_num, activated_txo.position)
                    if txo_tup in self.support_txo_to_claim:
                        amount = self.support_txo_to_claim[txo_tup][1]
                    else:
                        amount = self.db.get_support_txo_amount(
                            activated.claim_hash, activated_txo.tx_num, activated_txo.position
                        )
                    if amount is None:
                        # print("\tskip activate support for non existent claim")
                        continue
                    self.activated_support_amount_by_claim[activated.claim_hash].append(amount)
                self.activation_by_claim_by_name[activated.normalized_name][activated.claim_hash].append((activated_txo, amount))
                # print(f"\tactivate {'support' if txo_type == ACTIVATED_SUPPORT_TXO_TYPE else 'claim'} "
                #       f"{activated.claim_hash.hex()} @ {activated_txo.height}")

        # go through claims where the controlling claim or supports to the controlling claim have been abandoned
        # check if takeovers are needed or if the name node is now empty
        need_reactivate_if_takes_over = {}
        for need_takeover in names_with_abandoned_controlling_claims:
            existing = self.db.get_claim_txos_for_name(need_takeover)
            has_candidate = False
            # add existing claims to the queue for the takeover
            # track that we need to reactivate these if one of them becomes controlling
            for candidate_claim_hash, (tx_num, nout) in existing.items():
                if candidate_claim_hash in self.abandoned_claims:
                    continue
                has_candidate = True
                existing_activation = self.db.get_activation(tx_num, nout)
                activate_key = PendingActivationKey(
                    existing_activation, ACTIVATED_CLAIM_TXO_TYPE, tx_num, nout
                )
                self.activation_by_claim_by_name[need_takeover][candidate_claim_hash].append((
                    activate_key, self.db.get_claim_txo_amount(candidate_claim_hash)
                ))
                need_reactivate_if_takes_over[(need_takeover, candidate_claim_hash)] = activate_key
                # print(f"\tcandidate to takeover abandoned controlling claim for "
                #       f"{activate_key.tx_num}:{activate_key.position} {activate_key.is_claim}")
            if not has_candidate:
                # remove name takeover entry, the name is now unclaimed
                controlling = get_controlling(need_takeover)
                self.db_op_stack.extend_ops(get_remove_name_ops(need_takeover, controlling.claim_hash, controlling.height))

        # scan for possible takeovers out of the accumulated activations; for those, make sure there
        # aren't any future activations for the taken-over names with yet higher amounts. if there are,
        # those need to be activated now and take over instead. for example:
        # claim A has been winning at 0.1 for long enough that the takeover delay is > 1 block
        # claim B is made for 0.2
        # a block later, claim C is made for 0.3; it will be scheduled to activate 1 (or rarely 2) block(s) after B
        # upon the delayed activation of B, we need to detect this and activate C so it takes over early instead

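        # cache db existence checks so each claim is only looked up once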
        claim_exists = {}
        for activated, activated_claim_txo in self.db.get_future_activated(height):
            # uses the pending effective amount for the future activation height, not the current height
            future_amount = self._get_pending_claim_amount(
                activated.normalized_name, activated.claim_hash, activated_claim_txo.height + 1
            )
            if activated.claim_hash not in claim_exists:
                claim_exists[activated.claim_hash] = activated.claim_hash in self.claim_hash_to_txo or (
                    self.db.get_claim_txo(activated.claim_hash) is not None)
            if claim_exists[activated.claim_hash] and activated.claim_hash not in self.abandoned_claims:
                v = future_amount, activated, activated_claim_txo
                future_activations[activated.normalized_name][activated.claim_hash] = v

        for name, future_activated in activate_in_future.items():
            for claim_hash, activated in future_activated.items():
                if claim_hash not in claim_exists:
                    claim_exists[claim_hash] = claim_hash in self.claim_hash_to_txo or (
                        self.db.get_claim_txo(claim_hash) is not None)
                if not claim_exists[claim_hash]:
                    continue
                if claim_hash in self.abandoned_claims:
                    continue
                for txo in activated:
                    v = txo[1], PendingActivationValue(claim_hash, name), txo[0]
                    future_activations[name][claim_hash] = v
                    if txo[0].is_claim:
                        self.possible_future_claim_amount_by_name_and_hash[(name, claim_hash)] = txo[1]
                    else:
                        self.possible_future_support_amounts_by_claim_hash[claim_hash].append(txo[1])

        # process takeovers
        checked_names = set()
        for name, activated in self.activation_by_claim_by_name.items():
            checked_names.add(name)
            controlling = controlling_claims[name]
            amounts = {
                claim_hash: self._get_pending_effective_amount(name, claim_hash)
                for claim_hash in activated.keys() if claim_hash not in self.abandoned_claims
            }
            # if there is a controlling claim include it in the amounts to ensure it remains the max
            if controlling and controlling.claim_hash not in self.abandoned_claims:
                amounts[controlling.claim_hash] = self._get_pending_effective_amount(name, controlling.claim_hash)
            winning_claim_hash = max(amounts, key=lambda x: amounts[x])
            if not controlling or (winning_claim_hash != controlling.claim_hash and
                                   name in names_with_abandoned_controlling_claims) or \
                    ((winning_claim_hash != controlling.claim_hash) and (amounts[winning_claim_hash] > amounts[controlling.claim_hash])):
                amounts_with_future_activations = {claim_hash: amount for claim_hash, amount in amounts.items()}
                amounts_with_future_activations.update(
                    {
                        claim_hash: self._get_pending_effective_amount(
                            name, claim_hash, self.height + 1 + self.coin.maxTakeoverDelay
                        ) for claim_hash in future_activations[name]
                    }
                )
                winning_including_future_activations = max(
                    amounts_with_future_activations, key=lambda x: amounts_with_future_activations[x]
                )
                future_winning_amount = amounts_with_future_activations[winning_including_future_activations]

                if winning_claim_hash != winning_including_future_activations and \
                        future_winning_amount > amounts[winning_claim_hash]:
                    # print(f"\ttakeover by {winning_claim_hash.hex()} triggered early activation and "
                    #       f"takeover by {winning_including_future_activations.hex()} at {height}")
                    # handle a pending activated claim jumping the takeover delay when another name takes over
                    if winning_including_future_activations not in self.claim_hash_to_txo:
                        claim = self.db.get_claim_txo(winning_including_future_activations)
                        tx_num = claim.tx_num
                        position = claim.position
                        amount = claim.amount
                        activation = self.db.get_activation(tx_num, position)
                    else:
                        tx_num, position = self.claim_hash_to_txo[winning_including_future_activations]
                        amount = None
                        activation = None
                        for (k, tx_amount) in activate_in_future[name][winning_including_future_activations]:
                            if (k.tx_num, k.position) == (tx_num, position):
                                amount = tx_amount
                                activation = k.height
                                break
                        assert None not in (amount, activation)
                    # update the claim that's activating early
                    self.db_op_stack.extend_ops(
                        StagedActivation(
                            ACTIVATED_CLAIM_TXO_TYPE, winning_including_future_activations, tx_num,
                            position, activation, name, amount
                        ).get_remove_activate_ops() + \
                        StagedActivation(
                            ACTIVATED_CLAIM_TXO_TYPE, winning_including_future_activations, tx_num,
                            position, height, name, amount
                        ).get_activate_ops()
                    )

                    for (k, amount) in activate_in_future[name][winning_including_future_activations]:
                        txo = (k.tx_num, k.position)
                        if txo in self.possible_future_support_txos_by_claim_hash[winning_including_future_activations]:
                            self.db_op_stack.extend_ops(
                                StagedActivation(
                                    ACTIVATED_SUPPORT_TXO_TYPE, winning_including_future_activations, k.tx_num,
                                    k.position, k.height, name, amount
                                ).get_remove_activate_ops() + \
                                StagedActivation(
                                    ACTIVATED_SUPPORT_TXO_TYPE, winning_including_future_activations, k.tx_num,
                                    k.position, height, name, amount
                                ).get_activate_ops()
                            )

                    self.db_op_stack.extend_ops(get_takeover_name_ops(name, winning_including_future_activations, height, controlling))
                    self.touched_claim_hashes.add(winning_including_future_activations)
                    if controlling and controlling.claim_hash not in self.abandoned_claims:
                        self.touched_claim_hashes.add(controlling.claim_hash)
                elif not controlling or (winning_claim_hash != controlling.claim_hash and
                                         name in names_with_abandoned_controlling_claims) or \
                        ((winning_claim_hash != controlling.claim_hash) and (amounts[winning_claim_hash] > amounts[controlling.claim_hash])):
                    # print(f"\ttakeover by {winning_claim_hash.hex()} at {height}")
                    if (name, winning_claim_hash) in need_reactivate_if_takes_over:
                        previous_pending_activate = need_reactivate_if_takes_over[(name, winning_claim_hash)]
                        amount = self.db.get_claim_txo_amount(
                            winning_claim_hash
                        )
                        if winning_claim_hash in self.claim_hash_to_txo:
                            tx_num, position = self.claim_hash_to_txo[winning_claim_hash]
                            amount = self.txo_to_claim[(tx_num, position)].amount
                        else:
                            tx_num, position = previous_pending_activate.tx_num, previous_pending_activate.position
                        if previous_pending_activate.height > height:
                            # the claim had a pending activation in the future, move it to now
                            if tx_num < self.tx_count:
                                self.db_op_stack.extend_ops(
                                    StagedActivation(
                                        ACTIVATED_CLAIM_TXO_TYPE, winning_claim_hash, tx_num,
                                        position, previous_pending_activate.height, name, amount
                                    ).get_remove_activate_ops()
                                )
                            self.db_op_stack.extend_ops(
                                StagedActivation(
                                    ACTIVATED_CLAIM_TXO_TYPE, winning_claim_hash, tx_num,
                                    position, height, name, amount
                                ).get_activate_ops()
                            )
                    self.db_op_stack.extend_ops(get_takeover_name_ops(name, winning_claim_hash, height, controlling))
                    if controlling and controlling.claim_hash not in self.abandoned_claims:
                        self.touched_claim_hashes.add(controlling.claim_hash)
                    self.touched_claim_hashes.add(winning_claim_hash)
                elif winning_claim_hash == controlling.claim_hash:
                    # print("\tstill winning")
                    pass
                else:
                    # print("\tno takeover")
                    pass

        # handle remaining takeovers from abandoned supports
        for (name, claim_hash), amounts in abandoned_support_check_need_takeover.items():
            if name in checked_names:
                continue
            checked_names.add(name)
            controlling = get_controlling(name)
            amounts = {
                claim_hash: self._get_pending_effective_amount(name, claim_hash)
                for claim_hash in self.db.get_claims_for_name(name) if claim_hash not in self.abandoned_claims
            }
            if controlling and controlling.claim_hash not in self.abandoned_claims:
                amounts[controlling.claim_hash] = self._get_pending_effective_amount(name, controlling.claim_hash)
            winning = max(amounts, key=lambda x: amounts[x])
            if (controlling and winning != controlling.claim_hash) or (not controlling and winning):
                # print(f"\ttakeover from abandoned support {controlling.claim_hash.hex()} -> {winning.hex()}")
                self.db_op_stack.extend_ops(get_takeover_name_ops(name, winning, height, controlling))
                if controlling:
                    self.touched_claim_hashes.add(controlling.claim_hash)
                self.touched_claim_hashes.add(winning)

    def _add_claim_activation_change_notification(self, claim_id: str, height: int, added: bool, prev_amount: int,
                                                  new_amount: int):
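        """Record an activation amount change so the ES sync can compute trending for this claim."""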
        self.activation_info_to_send_es[claim_id].append(TrendingNotification(height, added, prev_amount, new_amount))

    def _get_cumulative_update_ops(self, height: int):
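        """Merge this block's touched/removed claim sets and restage effective amounts for
        bid-ordered resolve, queueing trending notifications along the way."""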
        # gather cumulative removed/touched sets to update the search index
        self.removed_claim_hashes.update(set(self.abandoned_claims.keys()))
        self.touched_claim_hashes.difference_update(self.removed_claim_hashes)
        self.touched_claim_hashes.update(
            {claim_hash for _, claim_hash in self.activated_claim_amount_by_name_and_hash.keys()}.union(
                self.claim_hash_to_txo.keys()
            ).union(
                self.removed_active_support_amount_by_claim.keys()
            ).union(
                self.signatures_changed
            ).union(
                self.activated_support_amount_by_claim.keys()
            ).difference(
                self.removed_claim_hashes
            )
        )

        # use the cumulative changes to update bid ordered resolve
        for removed in self.removed_claim_hashes:
            removed_claim = self.db.get_claim_txo(removed)
            if removed_claim:
                amt = self.db.get_url_effective_amount(
                    removed_claim.normalized_name, removed
                )
                if amt:
                    self.db_op_stack.extend_ops(get_remove_effective_amount_ops(
                        removed_claim.normalized_name, amt.effective_amount, amt.tx_num,
                        amt.position, removed
                    ))
        for touched in self.touched_claim_hashes:
            prev_effective_amount = 0

            if touched in self.claim_hash_to_txo:
                pending = self.txo_to_claim[self.claim_hash_to_txo[touched]]
                name, tx_num, position = pending.normalized_name, pending.tx_num, pending.position
                claim_from_db = self.db.get_claim_txo(touched)
                if claim_from_db:
                    claim_amount_info = self.db.get_url_effective_amount(name, touched)
                    if claim_amount_info:
                        prev_effective_amount = claim_amount_info.effective_amount
                        self.db_op_stack.extend_ops(get_remove_effective_amount_ops(
                            name, claim_amount_info.effective_amount, claim_amount_info.tx_num,
                            claim_amount_info.position, touched
                        ))
            else:
                v = self.db.get_claim_txo(touched)
                if not v:
                    continue
                name, tx_num, position = v.normalized_name, v.tx_num, v.position
                amt = self.db.get_url_effective_amount(name, touched)
                if amt:
                    prev_effective_amount = amt.effective_amount
                    self.db_op_stack.extend_ops(
                        get_remove_effective_amount_ops(
                            name, amt.effective_amount, amt.tx_num, amt.position, touched
                        )
                    )

            if (name, touched) in self.activated_claim_amount_by_name_and_hash:
                self._add_claim_activation_change_notification(
                    touched.hex(), height, True, prev_effective_amount,
                    self.activated_claim_amount_by_name_and_hash[(name, touched)]
                )
            if touched in self.activated_support_amount_by_claim:
                for support_amount in self.activated_support_amount_by_claim[touched]:
                    self._add_claim_activation_change_notification(
                        touched.hex(), height, True, prev_effective_amount, support_amount
                    )
            if touched in self.removed_active_support_amount_by_claim:
                for support_amount in self.removed_active_support_amount_by_claim[touched]:
                    self._add_claim_activation_change_notification(
                        touched.hex(), height, False, prev_effective_amount, support_amount
                    )
            new_effective_amount = self._get_pending_effective_amount(name, touched)
            self.db_op_stack.extend_ops(
                get_add_effective_amount_ops(
                    name, new_effective_amount, tx_num, position, touched
                )
            )

        self.touched_claim_hashes.update(
            {k for k in self.pending_reposted if k not in self.removed_claim_hashes}
        )
        self.touched_claim_hashes.update(
            {k for k, v in self.pending_channel_counts.items() if v != 0 and k not in self.removed_claim_hashes}
        )
        self.touched_claims_to_send_es.difference_update(self.removed_claim_hashes)
        self.touched_claims_to_send_es.update(self.touched_claim_hashes)
        self.removed_claims_to_send_es.update(self.removed_claim_hashes)

    def advance_block(self, block):
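        """Stage and apply all db ops for a new block: txs, UTXOs, claims, supports,
        expirations, activations and takeovers, then advance the in-memory tip."""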
        height = self.height + 1
        # print("advance ", height)
        # Use local vars for speed in the loops
        tx_count = self.tx_count
        spend_utxo = self.spend_utxo
        add_utxo = self.add_utxo
        spend_claim_or_support_txo = self._spend_claim_or_support_txo
        add_claim_or_support = self._add_claim_or_support
        txs: List[Tuple[Tx, bytes]] = block.transactions

        self.db.prefix_db.block_hash.stage_put(key_args=(height,), value_args=(self.coin.header_hash(block.header),))
        self.db.prefix_db.header.stage_put(key_args=(height,), value_args=(block.header,))

        for tx, tx_hash in txs:
            spent_claims = {}
            txos = Transaction(tx.raw).outputs

            self.db.prefix_db.tx.stage_put(key_args=(tx_hash,), value_args=(tx.raw,))
            self.db.prefix_db.tx_num.stage_put(key_args=(tx_hash,), value_args=(tx_count,))
            self.db.prefix_db.tx_hash.stage_put(key_args=(tx_count,), value_args=(tx_hash,))

            # Spend the inputs
            for txin in tx.inputs:
                if txin.is_generation():
                    continue
                # spend utxo for address histories
                hashX = spend_utxo(txin.prev_hash, txin.prev_idx)
                if hashX:
                    if tx_count not in self.hashXs_by_tx[hashX]:
                        self.hashXs_by_tx[hashX].append(tx_count)
                # spend claim/support txo
                spend_claim_or_support_txo(height, txin, spent_claims)

            # Add the new UTXOs
            for nout, txout in enumerate(tx.outputs):
                # Get the hashX. Ignore unspendable outputs
                hashX = add_utxo(tx_hash, tx_count, nout, txout)
                if hashX:
                    # self._set_hashX_cache(hashX)
                    if tx_count not in self.hashXs_by_tx[hashX]:
                        self.hashXs_by_tx[hashX].append(tx_count)
                # add claim/support txo
                add_claim_or_support(
                    height, tx_hash, tx_count, nout, txos[nout], spent_claims
                )

            # Handle abandoned claims
            abandoned_channels = {}
            # abandon the channels last to handle abandoned signed claims in the same tx,
            # see test_abandon_channel_and_claims_in_same_tx
            for abandoned_claim_hash, (tx_num, nout, normalized_name) in spent_claims.items():
                if normalized_name.startswith('@'):
                    abandoned_channels[abandoned_claim_hash] = (tx_num, nout, normalized_name)
                else:
                    # print(f"\tabandon {normalized_name} {abandoned_claim_hash.hex()} {tx_num} {nout}")
                    self._abandon_claim(abandoned_claim_hash, tx_num, nout, normalized_name)

            for abandoned_claim_hash, (tx_num, nout, normalized_name) in abandoned_channels.items():
                # print(f"\tabandon {normalized_name} {abandoned_claim_hash.hex()} {tx_num} {nout}")
                self._abandon_claim(abandoned_claim_hash, tx_num, nout, normalized_name)

            self.db.total_transactions.append(tx_hash)
            self.db.transaction_num_mapping[tx_hash] = tx_count
            tx_count += 1

        # handle expired claims
        self._expire_claims(height)

        # activate claims and process takeovers
        self._get_takeover_ops(height)

        # update effective amount and update sets of touched and deleted claims
        self._get_cumulative_update_ops(height)

        self.db_op_stack.append_op(RevertablePut(*Prefixes.tx_count.pack_item(height, tx_count)))
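
        # stage the per-address tx histories accumulated for this block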
        for hashX, new_history in self.hashXs_by_tx.items():
            if not new_history:
                continue
            self.db_op_stack.append_op(
                RevertablePut(
                    *Prefixes.hashX_history.pack_item(
                        hashX, height, new_history
                    )
                )
            )

        self.tx_count = tx_count
        self.db.tx_counts.append(self.tx_count)

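        # only keep undo information for blocks within the reorg limit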
        cached_max_reorg_depth = self.daemon.cached_height() - self.env.reorg_limit
        if height >= cached_max_reorg_depth:
            self.db_op_stack.append_op(
                RevertablePut(
                    *Prefixes.touched_or_deleted.pack_item(
                        height, self.touched_claim_hashes, self.removed_claim_hashes
                    )
                )
            )
            self.db_op_stack.append_op(
                RevertablePut(
                    *Prefixes.undo.pack_item(height, self.db_op_stack.get_undo_ops())
                )
            )

        self.height = height
        self.db.headers.append(block.header)
        self.tip = self.coin.header_hash(block.header)

    def clear_after_advance_or_reorg(self):
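        """Reset all per-block caches and pending in-memory state after a block is advanced or reverted."""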
        self.db_op_stack.clear()
        self.txo_to_claim.clear()
        self.claim_hash_to_txo.clear()
        self.support_txos_by_claim.clear()
        self.support_txo_to_claim.clear()
        self.removed_support_txos_by_name_by_claim.clear()
        self.abandoned_claims.clear()
        self.removed_active_support_amount_by_claim.clear()
        self.activated_support_amount_by_claim.clear()
        self.activated_claim_amount_by_name_and_hash.clear()
        self.activation_by_claim_by_name.clear()
        self.possible_future_claim_amount_by_name_and_hash.clear()
        self.possible_future_support_amounts_by_claim_hash.clear()
        self.possible_future_support_txos_by_claim_hash.clear()
        self.pending_channels.clear()
        self.amount_cache.clear()
        self.signatures_changed.clear()
        self.expired_claim_hashes.clear()
        self.doesnt_have_valid_signature.clear()
        self.claim_channels.clear()
        self.utxo_cache.clear()
        self.hashXs_by_tx.clear()
        self.history_cache.clear()
        self.mempool.notified_mempool_txs.clear()
        self.removed_claim_hashes.clear()
        self.touched_claim_hashes.clear()
        self.pending_reposted.clear()
        self.pending_channel_counts.clear()

    async def backup_block(self):
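        """Revert the top block by staging its stored undo ops, then step the chain state back one height."""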
        # self.db.assert_flushed(self.flush_data())
        self.logger.info("backup block %i", self.height)
        # Check and update self.tip
        undo_ops, touched_and_deleted_bytes = self.db.read_undo_info(self.height)
        if undo_ops is None:
            raise ChainError(f'no undo information found for height {self.height:,d}')
        self.db_op_stack.append_op(RevertableDelete(Prefixes.undo.pack_key(self.height), undo_ops))
        self.db_op_stack.apply_packed_undo_ops(undo_ops)

        touched_and_deleted = Prefixes.touched_or_deleted.unpack_value(touched_and_deleted_bytes)
        self.touched_claims_to_send_es.update(touched_and_deleted.touched_claims)
        self.removed_claims_to_send_es.difference_update(touched_and_deleted.touched_claims)
        self.removed_claims_to_send_es.update(touched_and_deleted.deleted_claims)

        self.db.headers.pop()
        self.db.tx_counts.pop()
        self.tip = self.coin.header_hash(self.db.headers[-1])
        while len(self.db.total_transactions) > self.db.tx_counts[-1]:
            self.db.transaction_num_mapping.pop(self.db.total_transactions.pop())
            self.tx_count -= 1
        self.height -= 1

        # self.touched can include other addresses which is
        # harmless, but remove None.
        self.touched_hashXs.discard(None)

    def add_utxo(self, tx_hash: bytes, tx_num: int, nout: int, txout: 'TxOutput') -> Optional[bytes]:
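        """Cache a new UTXO and stage the utxo and hashX_utxo puts; returns the hashX for spendable outputs."""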
        hashX = self.coin.hashX_from_script(txout.pk_script)
        if hashX:
            self.touched_hashXs.add(hashX)
            self.utxo_cache[(tx_hash, nout)] = (hashX, txout.value)
            self.db_op_stack.extend_ops([
                RevertablePut(
                    *Prefixes.utxo.pack_item(hashX, tx_num, nout, txout.value)
                ),
                RevertablePut(
                    *Prefixes.hashX_utxo.pack_item(tx_hash[:4], tx_num, nout, hashX)
                )
            ])
            return hashX

    def spend_utxo(self, tx_hash: bytes, nout: int):
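        """Spend a UTXO, first from the in-memory cache and then from the db, staging revertable deletes.

        Returns the hashX, or None if the output isn't tracked; raises ChainError if the hashX
        is known but the UTXO value is missing from the db.
        """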
        hashX, amount = self.utxo_cache.pop((tx_hash, nout), (None, None))
        txin_num = self.db.transaction_num_mapping[tx_hash]
        hdb_key = Prefixes.hashX_utxo.pack_key(tx_hash[:4], txin_num, nout)
        if not hashX:
            hashX = self.db.db.get(hdb_key)
            if not hashX:
                return
            udb_key = Prefixes.utxo.pack_key(hashX, txin_num, nout)
            utxo_value_packed = self.db.db.get(udb_key)
            if utxo_value_packed is None:
                self.logger.warning(
                    "%s:%s is not found in UTXO db for %s", hash_to_hex_str(tx_hash), nout, hash_to_hex_str(hashX)
                )
                raise ChainError(
                    f"{hash_to_hex_str(tx_hash)}:{nout} is not found in UTXO db for {hash_to_hex_str(hashX)}"
                )
            self.touched_hashXs.add(hashX)
            self.db_op_stack.extend_ops([
                RevertableDelete(hdb_key, hashX),
                RevertableDelete(udb_key, utxo_value_packed)
            ])
            return hashX
        elif amount is not None:
            udb_key = Prefixes.utxo.pack_key(hashX, txin_num, nout)
            self.touched_hashXs.add(hashX)
            self.db_op_stack.extend_ops([
                RevertableDelete(hdb_key, hashX),
                RevertableDelete(udb_key, Prefixes.utxo.pack_value(amount))
            ])
            return hashX

    async def _process_prefetched_blocks(self):
        """Loop forever processing blocks as they arrive."""
        while True:
            if self.height == self.daemon.cached_height():
                if not self._caught_up_event.is_set():
                    await self._first_caught_up()
                    self._caught_up_event.set()
            await self.blocks_event.wait()
            self.blocks_event.clear()
            blocks = self.prefetcher.get_prefetched_blocks()
            try:
                await self.check_and_advance_blocks(blocks)
            except Exception:
                self.logger.exception("error while processing txs")
                raise

    async def _first_caught_up(self):
        self.logger.info(f'caught up to height {self.height}')
        # Flush everything but with first_sync->False state.
        first_sync = self.db.first_sync
        self.db.first_sync = False
        await self.write_state()
        if first_sync:
            self.logger.info(f'{lbry.__version__} synced to '
                             f'height {self.height:,d}, halting here.')
            self.shutdown_event.set()

    async def fetch_and_process_blocks(self, caught_up_event):
        """Fetch, process and index blocks from the daemon.

        Sets caught_up_event when first caught up. Flushes to disk
        and shuts down cleanly if cancelled.

        This is mainly because if, during initial sync, ElectrumX is
        asked to shut down when a large number of blocks have been
        processed but not written to disk, it should write those to
        disk before exiting, as otherwise a significant amount of work
        could be lost.
        """

        self._caught_up_event = caught_up_event
        try:
            await self.db.open_dbs()
            self.db_op_stack = self.db.db_op_stack
            self.height = self.db.db_height
            self.tip = self.db.db_tip
            self.tx_count = self.db.db_tx_count
            self.status_server.set_height(self.db.fs_height, self.db.db_tip)
            await asyncio.wait([
                self.prefetcher.main_loop(self.height),
                self._process_prefetched_blocks()
            ])
        except asyncio.CancelledError:
            raise
        except:
            self.logger.exception("Block processing failed!")
            raise
        finally:
            self.status_server.stop()
            # Shut down block processing
            self.logger.info('closing the DB for a clean shutdown...')
            self.db.close()
            # self.executor.shutdown(wait=True)
|