refactored

Lex Berezhny 2020-07-05 23:03:45 -04:00
parent 3fe1981657
commit 7d9bf03574
13 changed files with 750 additions and 793 deletions

View file

@@ -86,7 +86,7 @@ class BlockchainDB:
return await self.run_in_executor(self.sync_execute, sql, *args) return await self.run_in_executor(self.sync_execute, sql, *args)
def sync_execute_fetchall(self, sql: str, *args) -> List[dict]: def sync_execute_fetchall(self, sql: str, *args) -> List[dict]:
return [dict(r) for r in self.connection.execute(sql, *args).fetchall()] return self.connection.execute(sql, *args).fetchall()
async def execute_fetchall(self, sql: str, *args) -> List[dict]: async def execute_fetchall(self, sql: str, *args) -> List[dict]:
return await self.run_in_executor(self.sync_execute_fetchall, sql, *args) return await self.run_in_executor(self.sync_execute_fetchall, sql, *args)
@@ -112,7 +112,7 @@ class BlockchainDB:
if file_number is not None and start_height is not None: if file_number is not None and start_height is not None:
sql += "AND file = ? AND height >= ?" sql += "AND file = ? AND height >= ?"
args = (file_number, start_height) args = (file_number, start_height)
return self.sync_execute_fetchall(sql + " GROUP BY file ORDER BY file ASC;", args) return [dict(r) for r in self.sync_execute_fetchall(sql + " GROUP BY file ORDER BY file ASC;", args)]
async def get_block_files(self, file_number: int = None, start_height: int = None) -> List[dict]: async def get_block_files(self, file_number: int = None, start_height: int = None) -> List[dict]:
return await self.run_in_executor( return await self.run_in_executor(
@@ -120,14 +120,14 @@ class BlockchainDB:
) )
def sync_get_blocks_in_file(self, block_file: int, start_height=0) -> List[dict]: def sync_get_blocks_in_file(self, block_file: int, start_height=0) -> List[dict]:
return self.sync_execute_fetchall( return [dict(r) for r in self.sync_execute_fetchall(
""" """
SELECT datapos as data_offset, height, hash as block_hash, txCount as txs SELECT datapos as data_offset, height, hash as block_hash, txCount as txs
FROM block_info FROM block_info
WHERE file = ? AND height >= ? AND status&1 AND status&4 WHERE file = ? AND height >= ? AND status&1 AND status&4
ORDER BY datapos ASC; ORDER BY datapos ASC;
""", (block_file, start_height) """, (block_file, start_height)
) )]
async def get_blocks_in_file(self, block_file: int, start_height=0) -> List[dict]: async def get_blocks_in_file(self, block_file: int, start_height=0) -> List[dict]:
return await self.run_in_executor(self.sync_get_blocks_in_file, block_file, start_height) return await self.run_in_executor(self.sync_get_blocks_in_file, block_file, start_height)
@@ -145,24 +145,27 @@ class BlockchainDB:
} }
def sync_get_takeover_count(self, start_height: int, end_height: int) -> int: def sync_get_takeover_count(self, start_height: int, end_height: int) -> int:
sql = "SELECT COUNT(*) FROM takeover WHERE height BETWEEN ? AND ?" sql = """
return self.connection.execute(sql, (start_height, end_height)).fetchone()[0] SELECT COUNT(*) FROM claim WHERE name IN (
SELECT name FROM takeover WHERE claimID IS NOT NULL AND height BETWEEN ? AND ?
)
""", (start_height, end_height)
return self.connection.execute(*sql).fetchone()[0]
async def get_takeover_count(self, start_height: int, end_height: int) -> int: async def get_takeover_count(self, start_height: int, end_height: int) -> int:
return await self.run_in_executor(self.sync_get_takeover_count, start_height, end_height) return await self.run_in_executor(self.sync_get_takeover_count, start_height, end_height)
def sync_get_takeovers(self, start_height: int, end_height: int) -> List[dict]: def sync_get_takeovers(self, start_height: int, end_height: int) -> List[dict]:
return self.sync_execute_fetchall( sql = """
""" SELECT name, claimID, MAX(height) AS height FROM takeover
SELECT WHERE claimID IS NOT NULL AND height BETWEEN ? AND ?
takeover.name, GROUP BY name
takeover.claimID AS claim_hash,
takeover.height
FROM takeover
WHERE height BETWEEN ? AND ?
ORDER BY height, name
""", (start_height, end_height) """, (start_height, end_height)
) return [{
'normalized': normalize_name(r['name'].decode()),
'claim_hash': r['claimID'],
'height': r['height']
} for r in self.sync_execute_fetchall(*sql)]
async def get_takeovers(self, start_height: int, end_height: int) -> List[dict]: async def get_takeovers(self, start_height: int, end_height: int) -> List[dict]:
return await self.run_in_executor(self.sync_get_takeovers, start_height, end_height) return await self.run_in_executor(self.sync_get_takeovers, start_height, end_height)
@@ -174,19 +177,15 @@ class BlockchainDB:
async def get_claim_metadata_count(self, start_height: int, end_height: int) -> int: async def get_claim_metadata_count(self, start_height: int, end_height: int) -> int:
return await self.run_in_executor(self.sync_get_claim_metadata_count, start_height, end_height) return await self.run_in_executor(self.sync_get_claim_metadata_count, start_height, end_height)
def sync_get_claim_metadata(self, start_height: int, end_height: int) -> List[dict]: def sync_get_claim_metadata(self, claim_hashes) -> List[dict]:
sql = """ sql = f"""
SELECT SELECT
name, claimID, activationHeight, expirationHeight, name, claimID, activationHeight, expirationHeight, originalHeight,
(SELECT (SELECT
CASE WHEN takeover.claimID = claim.claimID THEN takeover.height END CASE WHEN takeover.claimID = claim.claimID THEN takeover.height END
FROM takeover WHERE takeover.name = claim.name FROM takeover WHERE takeover.name = claim.name
ORDER BY height DESC LIMIT 1 ORDER BY height DESC LIMIT 1
) AS takeoverHeight, ) AS takeoverHeight,
(SELECT CASE WHEN takeover.claimID = claim.claimID THEN 1 ELSE 0 END
FROM takeover WHERE takeover.name = claim.name
ORDER BY height DESC LIMIT 1
) AS isControlling,
(SELECT find_shortest_id(c.claimid, claim.claimid) FROM claim AS c (SELECT find_shortest_id(c.claimid, claim.claimid) FROM claim AS c
WHERE WHERE
c.nodename = claim.nodename AND c.nodename = claim.nodename AND
@@ -194,18 +193,17 @@ class BlockchainDB:
c.claimid != claim.claimid c.claimid != claim.claimid
) AS shortestID ) AS shortestID
FROM claim FROM claim
WHERE originalHeight BETWEEN ? AND ? WHERE claimID IN ({','.join(['?' for _ in claim_hashes])})
ORDER BY originalHeight ORDER BY claimID
""", (start_height, end_height) """, claim_hashes
return [{ return [{
"name": r["name"], "name": r["name"],
"claim_hash_": r["claimID"], "claim_hash": r["claimID"],
"activation_height": r["activationHeight"], "activation_height": r["activationHeight"],
"expiration_height": r["expirationHeight"], "expiration_height": r["expirationHeight"],
"takeover_height": r["takeoverHeight"], "takeover_height": r["takeoverHeight"],
"is_controlling": r["isControlling"], "creation_height": r["originalHeight"],
"short_url": f'{normalize_name(r["name"].decode())}#{r["shortestID"] or r["claimID"][::-1].hex()[0]}', "short_url": f'{normalize_name(r["name"].decode())}#{r["shortestID"] or r["claimID"][::-1].hex()[0]}',
"short_url_": f'{normalize_name(r["name"].decode())}#{r["shortestID"] or r["claimID"][::-1].hex()[0]}',
} for r in self.sync_execute_fetchall(*sql)] } for r in self.sync_execute_fetchall(*sql)]
async def get_claim_metadata(self, start_height: int, end_height: int) -> List[dict]: async def get_claim_metadata(self, start_height: int, end_height: int) -> List[dict]:
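Throughout BlockchainDB the pattern is a blocking sync_* method against the sqlite connection paired with an async twin that dispatches it through run_in_executor; this commit also moves the dict() conversion into sync_execute_fetchall so callers always receive plain dicts. A minimal, self-contained sketch of that pattern, assuming a single-worker executor (ExampleDB is illustrative, not the actual lbry class):

    import asyncio
    import sqlite3
    from concurrent.futures import ThreadPoolExecutor
    from typing import List

    class ExampleDB:
        def __init__(self, path: str = ":memory:"):
            # check_same_thread=False because every blocking call is funneled
            # through the same single-worker executor thread
            self.connection = sqlite3.connect(path, check_same_thread=False)
            self.connection.row_factory = sqlite3.Row
            self.executor = ThreadPoolExecutor(max_workers=1)

        async def run_in_executor(self, func, *args):
            return await asyncio.get_running_loop().run_in_executor(self.executor, func, *args)

        def sync_execute_fetchall(self, sql: str, *args) -> List[dict]:
            # blocking sqlite3 call; converting rows to dicts here means callers
            # never have to deal with sqlite3.Row objects
            return [dict(r) for r in self.connection.execute(sql, *args).fetchall()]

        async def execute_fetchall(self, sql: str, *args) -> List[dict]:
            return await self.run_in_executor(self.sync_execute_fetchall, sql, *args)

    async def main():
        db = ExampleDB()
        print(await db.execute_fetchall("SELECT 1 AS height"))

    asyncio.run(main())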

View file

@@ -1,82 +0,0 @@
# pylint: disable=singleton-comparison
from sqlalchemy import func, desc
from sqlalchemy.future import select
from lbry.db import TXO_TYPES, CLAIM_TYPE_CODES
from lbry.db.tables import Claim, Support, TXO
def condition_unvalidated_signables(signable):
return (
(signable.c.is_signature_valid == None) &
(signable.c.channel_hash != None)
)
def get_unvalidated_signable_count(ctx, signable):
sql = (
select(func.count('*').label('total'))
.select_from(signable)
.where(condition_unvalidated_signables(signable))
)
return ctx.fetchone(sql)['total']
def select_unvalidated_signables(signable, pk, include_urls=False, include_previous=False):
sql = (
select(
pk, signable.c.signature, signable.c.signature_digest, signable.c.channel_hash, (
select(TXO.c.public_key).select_from(TXO)
.where(
(TXO.c.claim_hash == signable.c.channel_hash) &
(TXO.c.txo_type == TXO_TYPES['channel']) &
(TXO.c.height <= signable.c.height)
)
.order_by(desc(TXO.c.height))
.limit(1)
.scalar_subquery().label('public_key')
),
)
.where(condition_unvalidated_signables(signable))
)
if include_previous:
assert signable.name != 'support', "Supports cannot be updated and don't have a previous."
sql = sql.add_columns(
select(TXO.c.channel_hash).select_from(TXO)
.where(
(TXO.c.claim_hash == signable.c.claim_hash) &
(TXO.c.txo_type.in_(CLAIM_TYPE_CODES)) &
(TXO.c.height <= signable.c.height)
)
.order_by(desc(TXO.c.height)).offset(1).limit(1)
.scalar_subquery().label('previous_channel_hash')
)
if include_urls:
channel = Claim.alias('channel')
return sql.add_columns(
signable.c.short_url.label('claim_url'),
channel.c.short_url.label('channel_url')
).select_from(signable.join(channel, signable.c.channel_hash == channel.c.claim_hash))
return sql.select_from(signable)
def channel_content_count_calc(signable):
return (
select(func.count('*'))
.select_from(signable)
.where((signable.c.channel_hash == Claim.c.claim_hash) & signable.c.is_signature_valid)
.scalar_subquery()
)
def claim_support_aggregation(*cols):
return (
select(*cols)
.select_from(Support)
.where(Support.c.claim_hash == Claim.c.claim_hash)
.scalar_subquery()
)
staked_support_amount_calc = claim_support_aggregation(func.coalesce(func.sum(Support.c.amount), 0))
staked_support_count_calc = claim_support_aggregation(func.count('*'))
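The deleted module built its claim/support aggregations as correlated scalar subqueries (claim_support_aggregation above); the new steps.py constructs the same kind of expression inline against a TXO alias. A small runnable sketch of that SQLAlchemy pattern, using simplified stand-in tables rather than the real lbry.db.tables:

    from sqlalchemy import (
        MetaData, Table, Column, Integer, LargeBinary, create_engine, func, select
    )

    metadata = MetaData()
    claim = Table(
        'claim', metadata,
        Column('claim_hash', LargeBinary, primary_key=True),
        Column('staked_support_amount', Integer),
    )
    support = Table(
        'support', metadata,
        Column('claim_hash', LargeBinary),
        Column('amount', Integer),
    )

    # correlated scalar subquery: support total for whichever claim row is being updated
    staked_support_amount_calc = (
        select(func.coalesce(func.sum(support.c.amount), 0))
        .where(support.c.claim_hash == claim.c.claim_hash)
        .scalar_subquery()
    )

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    with engine.begin() as conn:
        conn.execute(claim.insert(), [{'claim_hash': b'a', 'staked_support_amount': 0}])
        conn.execute(support.insert(), [
            {'claim_hash': b'a', 'amount': 5},
            {'claim_hash': b'a', 'amount': 7},
        ])
        conn.execute(claim.update().values(staked_support_amount=staked_support_amount_calc))
        print(conn.execute(select(claim.c.staked_support_amount)).scalar())  # 12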

View file

@@ -1,80 +1,39 @@
# pylint: disable=singleton-comparison # pylint: disable=singleton-comparison
import logging import logging
import functools
from contextvars import ContextVar from contextvars import ContextVar
from typing import Set from functools import partial
from typing import Optional, Tuple
from sqlalchemy import bindparam, case, distinct, text from sqlalchemy import bindparam, case, distinct, text, func, between, desc
from sqlalchemy.future import select
from sqlalchemy.schema import CreateTable from sqlalchemy.schema import CreateTable
from lbry.db import queries from lbry.db import queries
from lbry.db.tables import Block as BlockTable, TXO, TXI from lbry.db.tables import (
from lbry.db.query_context import progress, context, Event Block as BlockTable, TX, TXO, TXI, Claim, Support,
pg_add_txo_constraints_and_indexes, pg_add_txi_constraints_and_indexes
)
from lbry.db.query_context import ProgressContext, context, event_emitter
from lbry.db.queries import rows_to_txos from lbry.db.queries import rows_to_txos
from lbry.db.sync import ( from lbry.db.sync import (
select_missing_supports, select_missing_supports,
select_missing_claims, select_stale_claims, condition_spent_claims,
condition_spent_claims, condition_spent_supports, condition_spent_supports, condition_missing_supports,
set_input_addresses, update_spent_outputs set_input_addresses, update_spent_outputs,
) )
from lbry.db.utils import least from lbry.db.utils import least
from lbry.schema.url import normalize_name from lbry.db.constants import TXO_TYPES, CLAIM_TYPE_CODES
from lbry.blockchain.lbrycrd import Lbrycrd from lbry.blockchain.lbrycrd import Lbrycrd
from lbry.blockchain.block import Block, create_block_filter from lbry.blockchain.block import Block, create_block_filter
from lbry.blockchain.bcd_data_stream import BCDataStream from lbry.blockchain.bcd_data_stream import BCDataStream
from lbry.blockchain.transaction import Output from lbry.blockchain.transaction import Output, OutputScript, TXRefImmutable
from .queries import (
select, Claim, Support,
TXO_TYPES, CLAIM_TYPE_CODES,
channel_content_count_calc,
staked_support_amount_calc, staked_support_count_calc,
select_unvalidated_signables, get_unvalidated_signable_count
)
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
_chain: ContextVar[Lbrycrd] = ContextVar('chain') _chain: ContextVar[Lbrycrd] = ContextVar('chain')
SYNC_STEPS = {
"initial_sync": [],
"ongoing_sync": [],
"events": [],
}
def sync_step(event: Event, step_size=1, initial_sync=False, ongoing_sync=False):
assert event.label not in SYNC_STEPS['events'], f"Event {event.label} used more than once."
assert initial_sync or ongoing_sync, "At least one of initial_sync or ongoing_sync must be true."
SYNC_STEPS['events'].append(event.label)
if initial_sync:
SYNC_STEPS['initial_sync'].append(event.label)
if ongoing_sync:
SYNC_STEPS['ongoing_sync'].append(event.label)
def wrapper(f):
@functools.wraps(f)
def with_progress(*args, **kwargs):
with progress(event, step_size=step_size) as p:
return f(*args, **kwargs, p=p)
return with_progress
return wrapper
class ClaimChanges:
deleted_channels: Set[bytes]
channels_with_changed_content: Set[bytes]
claims_with_changed_supports: Set[bytes]
def __init__(self):
self.deleted_channels = set()
self.channels_with_changed_content = set()
self.claims_with_changed_supports = set()
def get_or_initialize_lbrycrd(ctx=None) -> Lbrycrd: def get_or_initialize_lbrycrd(ctx=None) -> Lbrycrd:
chain = _chain.get(None) chain = _chain.get(None)
if chain is not None: if chain is not None:
@@ -85,37 +44,16 @@ def get_or_initialize_lbrycrd(ctx=None) -> Lbrycrd:
return chain return chain
def process_block_file(block_file_number: int, starting_height: int, initial_sync: bool): def process_block_file(block_file_number: int, starting_height: int):
ctx = context() ctx = context()
loader = ctx.get_bulk_loader() loader = ctx.get_bulk_loader()
last_block_processed = process_block_read(block_file_number, starting_height, initial_sync, loader) last_block_processed = process_block_read(block_file_number, starting_height, loader)
process_block_save(block_file_number, loader) process_block_save(block_file_number, loader)
return last_block_processed return last_block_processed
def process_metadata(starting_height: int, ending_height: int, initial_sync: bool): @event_emitter("blockchain.sync.block.read", "blocks", step_size=100)
chain = get_or_initialize_lbrycrd() def process_block_read(block_file_number: int, starting_height: int, loader, p: ProgressContext):
process_inputs_outputs(initial_sync)
changes = None
if not initial_sync:
changes = ClaimChanges()
process_claim_delete(changes)
process_claim_insert(changes)
process_claim_update(changes)
process_support_delete(changes)
process_support_insert(changes)
process_takeovers(starting_height, ending_height, chain)
process_claim_metadata(starting_height, ending_height, chain)
process_claim_signatures(changes)
process_support_signatures(changes)
if not initial_sync:
# these depend on signature validation
process_stake_calc(changes)
process_channel_content(changes)
@sync_step(Event.BLOCK_READ, step_size=100, initial_sync=True, ongoing_sync=True)
def process_block_read(block_file_number: int, starting_height: int, initial_sync: bool, loader, p=None):
chain = get_or_initialize_lbrycrd(p.ctx) chain = get_or_initialize_lbrycrd(p.ctx)
stop = p.ctx.stop_event stop = p.ctx.stop_event
new_blocks = chain.db.sync_get_blocks_in_file(block_file_number, starting_height) new_blocks = chain.db.sync_get_blocks_in_file(block_file_number, starting_height)
@@ -132,22 +70,46 @@ def process_block_read(block_file_number: int, starting_height: int, initial_syn
block_height = block_info['height'] block_height = block_info['height']
fp.seek(block_info['data_offset']) fp.seek(block_info['data_offset'])
block = Block.from_data_stream(stream, block_height, block_file_number) block = Block.from_data_stream(stream, block_height, block_file_number)
loader.add_block( loader.add_block(block)
block, initial_sync and chain.db.sync_get_claim_support_txo_hashes(block_height)
)
last_block_processed = block_height last_block_processed = block_height
p.step(done) p.step(done)
return last_block_processed return last_block_processed
@sync_step(Event.BLOCK_SAVE, initial_sync=True, ongoing_sync=True) @event_emitter("blockchain.sync.block.save", "txs")
def process_block_save(block_file_number: int, loader, p=None): def process_block_save(block_file_number: int, loader, p: ProgressContext):
p.extra = {'block_file': block_file_number} p.extra = {'block_file': block_file_number}
loader.save() loader.save(TX)
@sync_step(Event.INPUT_UPDATE, initial_sync=True, ongoing_sync=True) @event_emitter("blockchain.sync.block.filters", "blocks")
def process_inputs_outputs(initial_sync=False, p=None): def process_block_filters(p: ProgressContext):
blocks = []
all_filters = []
all_addresses = []
for block in queries.get_blocks_without_filters():
addresses = {
p.ctx.ledger.address_to_hash160(r['address'])
for r in queries.get_block_tx_addresses(block_hash=block['block_hash'])
}
all_addresses.extend(addresses)
block_filter = create_block_filter(addresses)
all_filters.append(block_filter)
blocks.append({'pk': block['block_hash'], 'block_filter': block_filter})
# filters = [get_block_filter(f) for f in all_filters]
p.ctx.execute(BlockTable.update().where(BlockTable.c.block_hash == bindparam('pk')), blocks)
# txs = []
# for tx in queries.get_transactions_without_filters():
# tx_filter = create_block_filter(
# {r['address'] for r in queries.get_block_tx_addresses(tx_hash=tx['tx_hash'])}
# )
# txs.append({'pk': tx['tx_hash'], 'tx_filter': tx_filter})
# execute(TX.update().where(TX.c.tx_hash == bindparam('pk')), txs)
@event_emitter("blockchain.sync.spends", "steps")
def process_spends(initial_sync: bool, p: ProgressContext):
step = 0 step = 0
@@ -184,18 +146,22 @@ def process_inputs_outputs(initial_sync=False, p=None):
str(insert_txis.compile(p.ctx.engine)).replace('txi AS old_txi', 'old_txi') str(insert_txis.compile(p.ctx.engine)).replace('txi AS old_txi', 'old_txi')
)) ))
p.step(next_step()) p.step(next_step())
# D. drop old txi # D. drop old txi and vacuum
p.ctx.execute(text("DROP TABLE old_txi;")) p.ctx.execute(text("DROP TABLE old_txi;"))
if p.ctx.is_postgres:
with p.ctx.engine.connect() as c:
c.execute(text("COMMIT;"))
c.execute(text("VACUUM ANALYZE txi;"))
p.step(next_step()) p.step(next_step())
# E. restore integrity constraint # E. restore integrity constraint
if p.ctx.is_postgres: if p.ctx.is_postgres:
p.ctx.execute(text("ALTER TABLE txi ADD PRIMARY KEY (txo_hash);")) pg_add_txi_constraints_and_indexes(p.ctx.execute)
p.step(next_step()) p.step(next_step())
else: else:
set_input_addresses(p.ctx) set_input_addresses(p.ctx)
p.step(next_step()) p.step(next_step())
# 2. Update spent TXOs setting is_spent = True # 2. Update spent TXOs setting spent_height
if initial_sync: if initial_sync:
# F. txo table reshuffling # F. txo table reshuffling
p.ctx.execute(text("ALTER TABLE txo RENAME TO old_txo;")) p.ctx.execute(text("ALTER TABLE txo RENAME TO old_txo;"))
@@ -205,127 +171,267 @@ class BlockchainDB:
p.step(next_step()) p.step(next_step())
# G. insert # G. insert
old_txo = TXO.alias('old_txo') old_txo = TXO.alias('old_txo')
columns = ( columns = [c for c in old_txo.columns if c.name != 'spent_height']
[c for c in old_txo.columns if c.name != 'is_spent'] + select_columns = columns + [func.coalesce(TXI.c.height, 0).label('spent_height')]
[(TXI.c.txo_hash != None).label('is_spent')] insert_columns = columns + [TXO.c.spent_height]
) select_txos = select(*select_columns).select_from(old_txo.join(TXI, isouter=True))
select_txos = select(*columns).select_from(old_txo.join(TXI, isouter=True)) insert_txos = TXO.insert().from_select(insert_columns, select_txos)
insert_txos = TXO.insert().from_select(columns, select_txos)
p.ctx.execute(text( p.ctx.execute(text(
str(insert_txos.compile(p.ctx.engine)).replace('txo AS old_txo', 'old_txo') str(insert_txos.compile(p.ctx.engine))
.replace('txo AS old_txo', 'old_txo')
.replace('%(coalesce_1)s', '0')
)) ))
p.step(next_step()) p.step(next_step())
# H. drop old txo # H. drop old txo
p.ctx.execute(text("DROP TABLE old_txo;")) p.ctx.execute(text("DROP TABLE old_txo;"))
if p.ctx.is_postgres:
with p.ctx.engine.connect() as c:
c.execute(text("COMMIT;"))
c.execute(text("VACUUM ANALYZE txo;"))
p.step(next_step()) p.step(next_step())
# I. restore integrity constraint # I. restore integrity constraint
if p.ctx.is_postgres: if p.ctx.is_postgres:
p.ctx.execute(text("ALTER TABLE txo ADD PRIMARY KEY (txo_hash);")) pg_add_txo_constraints_and_indexes(p.ctx.execute)
p.step(next_step()) p.step(next_step())
else: else:
update_spent_outputs(p.ctx) update_spent_outputs(p.ctx)
p.step(next_step()) p.step(next_step())
@sync_step(Event.BLOCK_FILTER, initial_sync=True, ongoing_sync=True) def insert_claims_with_lbrycrd(done, chain, p: ProgressContext, cursor):
def process_block_filters(p=None):
blocks = []
all_filters = []
all_addresses = []
for block in queries.get_blocks_without_filters():
addresses = {
p.ctx.ledger.address_to_hash160(r['address'])
for r in queries.get_block_tx_addresses(block_hash=block['block_hash'])
}
all_addresses.extend(addresses)
block_filter = create_block_filter(addresses)
all_filters.append(block_filter)
blocks.append({'pk': block['block_hash'], 'block_filter': block_filter})
# filters = [get_block_filter(f) for f in all_filters]
p.ctx.execute(BlockTable.update().where(BlockTable.c.block_hash == bindparam('pk')), blocks)
# txs = []
# for tx in queries.get_transactions_without_filters():
# tx_filter = create_block_filter(
# {r['address'] for r in queries.get_block_tx_addresses(tx_hash=tx['tx_hash'])}
# )
# txs.append({'pk': tx['tx_hash'], 'tx_filter': tx_filter})
# execute(TX.update().where(TX.c.tx_hash == bindparam('pk')), txs)
@sync_step(Event.CLAIM_DELETE, ongoing_sync=True)
def process_claim_delete(changes: ClaimChanges, p=None):
changes.channels_with_changed_content |= {
r['channel_hash'] for r in p.ctx.fetchall(
select(distinct(Claim.c.channel_hash))
.where(condition_spent_claims(
list(set(CLAIM_TYPE_CODES) - {TXO_TYPES['channel']})
) & (Claim.c.channel_hash != None))
)
}
changes.deleted_channels |= {
r['claim_hash'] for r in p.ctx.fetchall(
select(distinct(Claim.c.claim_hash)).where(
(Claim.c.claim_type == TXO_TYPES['channel']) &
condition_spent_claims([TXO_TYPES['channel']])
)
)
}
p.start(1)
p.ctx.execute(Claim.delete().where(condition_spent_claims()))
@sync_step(Event.CLAIM_INSERT, ongoing_sync=True)
def process_claim_insert(_, p=None):
loader = p.ctx.get_bulk_loader() loader = p.ctx.get_bulk_loader()
for claim in rows_to_txos(p.ctx.fetchall(select_missing_claims)): for rows in cursor.partitions(900):
loader.add_claim(claim) claim_metadata = iter(chain.db.sync_get_claim_metadata(claim_hashes=[row['claim_hash'] for row in rows]))
loader.save() for row in rows:
metadata = next(claim_metadata, None)
if metadata is None or metadata['claim_hash'] != row.claim_hash:
@sync_step(Event.CLAIM_UPDATE, ongoing_sync=True) log.error(
def process_claim_update(_, p=None): r"During sync'ing a claim in our db couldn't find a "
loader = p.ctx.get_bulk_loader() r"match in lbrycrd's db. This could be because lbrycrd "
for claim in rows_to_txos(p.ctx.fetchall(select_stale_claims)): r"moved a block forward and updated its own claim table "
loader.update_claim(claim) r"while we were still on a previous block, or it could be "
loader.save() r"a more fundamental issue... ¯\_(ツ)_/¯"
@sync_step(Event.SUPPORT_DELETE, ongoing_sync=True)
def process_support_delete(changes: ClaimChanges, p=None):
changes.claims_with_changed_supports |= {
r['claim_hash'] for r in p.ctx.fetchall(
select(distinct(Support.c.claim_hash)).where(condition_spent_supports)
) )
} if metadata is None:
changes.channels_with_changed_content |= { break
r['channel_hash'] for r in p.ctx.fetchall( if metadata['claim_hash'] != row.claim_hash:
select(distinct(Support.c.channel_hash)) continue
.where(condition_spent_supports & (Support.c.channel_hash != None)) txo = Output(
amount=row.amount,
script=OutputScript(row.src),
tx_ref=TXRefImmutable.from_hash(row.tx_hash, row.height),
position=row.txo_position,
) )
extra = {
'timestamp': row.timestamp,
'staked_support_amount': int(row.staked_support_amount),
'staked_support_count': int(row.staked_support_count),
'short_url': metadata['short_url'],
'creation_height': metadata['creation_height'],
'activation_height': metadata['activation_height'],
'expiration_height': metadata['expiration_height'],
'takeover_height': metadata['takeover_height'],
} }
p.start(1) if hasattr(row, 'signature'):
sql = Support.delete().where(condition_spent_supports) extra.update({
p.ctx.execute(sql) 'signature': row.signature,
'signature_digest': row.signature_digest,
'channel_public_key': row.channel_public_key,
'channel_url': row.channel_url
})
loader.add_claim(txo, **extra)
if len(loader.claims) >= 10_000:
done += loader.flush(Claim)
p.step(done)
done += loader.flush(Claim)
p.step(done)
return done
@sync_step(Event.SUPPORT_INSERT, ongoing_sync=True) def channel_content_count_calc(signable):
def process_support_insert(changes: ClaimChanges, p=None): return (
select(func.count('*'))
.select_from(signable)
.where((signable.c.channel_hash == Claim.c.claim_hash) & signable.c.is_signature_valid)
.scalar_subquery()
)
@event_emitter("blockchain.sync.claims", "claims")
def process_claims(starting_height: int, blocks_added: Optional[Tuple[int, int]], p: ProgressContext):
chain = get_or_initialize_lbrycrd(p.ctx)
initial_sync = not p.ctx.has_records(Claim)
to_be_modified = p.ctx.fetchtotal(
(TXO.c.txo_type.in_(CLAIM_TYPE_CODES)) &
(TXO.c.spent_height == 0) &
(TXO.c.txo_hash.notin_(select(Claim.c.txo_hash)))
)
to_be_deleted = to_be_synced = to_be_overtaken = to_be_counted_channel_members = 0
condition_changed_stakes = condition_changed_channel_content = None
if initial_sync:
to_be_counted_channel_members = p.ctx.fetchtotal(
(TXO.c.txo_type == TXO_TYPES['channel']) &
(TXO.c.spent_height == 0)
)
else:
to_be_deleted = p.ctx.fetchtotal(condition_spent_claims())
if blocks_added:
condition_changed_stakes = (
(TXO.c.txo_type == TXO_TYPES['support']) & (
between(TXO.c.height, blocks_added[0], blocks_added[-1]) |
between(TXO.c.spent_height, blocks_added[0], blocks_added[-1])
)
)
sql = (
select(func.count(distinct(TXO.c.claim_hash)).label('total'))
.where(condition_changed_stakes)
)
to_be_synced = p.ctx.fetchone(sql)['total']
condition_changed_channel_content = (
(TXO.c.channel_hash != None) & (
between(TXO.c.height, blocks_added[0], blocks_added[-1]) |
between(TXO.c.spent_height, blocks_added[0], blocks_added[-1])
)
)
sql = (
select(func.count(distinct(TXO.c.channel_hash)).label('total'))
.where(condition_changed_channel_content)
)
to_be_synced += p.ctx.fetchone(sql)['total']
to_be_overtaken = chain.db.sync_get_takeover_count(
start_height=blocks_added[0], end_height=blocks_added[-1])
p.start(to_be_deleted + to_be_modified + to_be_synced + to_be_overtaken + to_be_counted_channel_members)
done = 0
if to_be_deleted:
deleted = p.ctx.execute(Claim.delete().where(condition_spent_claims()))
assert to_be_deleted == deleted.rowcount, \
f"Expected claims to be deleted {to_be_deleted}, actual deleted {deleted.rowcount}."
done += deleted.rowcount
p.step(done)
support = TXO.alias('support')
staked_support_amount_calc = (
select(func.coalesce(func.sum(support.c.amount), 0)).where(
(support.c.txo_type == TXO_TYPES['support']) &
(support.c.spent_height == 0)
)
)
staked_support_count_calc = (
select(func.coalesce(func.count('*'), 0)).where(
(support.c.txo_type == TXO_TYPES['support']) &
(support.c.spent_height == 0)
)
)
select_claims = (
select(
TXO.c.claim_hash, TXO.c.amount, TXO.c.position.label('txo_position'),
TX.c.tx_hash, TX.c.height, TX.c.timestamp,
func.substr(TX.c.raw, TXO.c.script_offset+1, TXO.c.script_length).label('src'),
(staked_support_amount_calc
.where(support.c.claim_hash == TXO.c.claim_hash)
.label('staked_support_amount')),
(staked_support_count_calc
.where(support.c.claim_hash == TXO.c.claim_hash)
.label('staked_support_count'))
).order_by(TXO.c.claim_hash)
)
streaming_connection = p.ctx.connection.execution_options(stream_results=True)
# all channels need to be inserted first because the channel short_url will be needed to
# set the contained claims' canonical_urls when those are inserted next
done = insert_claims_with_lbrycrd(
done, chain, p, streaming_connection.execute(
select_claims.select_from(TXO.join(TX)).where(
(TXO.c.txo_type == TXO_TYPES['channel']) &
(TXO.c.spent_height == 0) &
(TXO.c.claim_hash.notin_(select(Claim.c.claim_hash)))
)
)
)
channel_txo = TXO.alias('channel_txo')
channel_claim = Claim.alias('channel_claim')
select_claims = (
select_claims.add_columns(
TXO.c.signature, TXO.c.signature_digest,
case([(
TXO.c.channel_hash != None,
select(channel_txo.c.public_key).select_from(channel_txo).where(
(channel_txo.c.txo_type == TXO_TYPES['channel']) &
(channel_txo.c.claim_hash == TXO.c.channel_hash) &
(channel_txo.c.height <= TXO.c.height)
).order_by(desc(channel_txo.c.height)).limit(1).scalar_subquery()
)]).label('channel_public_key'),
channel_claim.c.short_url.label('channel_url')
).select_from(
TXO
.join(TX)
.join(channel_claim, channel_claim.c.claim_hash == TXO.c.channel_hash, isouter=True)
)
)
done = insert_claims_with_lbrycrd(
done, chain, p, streaming_connection.execute(
select_claims.where(
(TXO.c.txo_type.in_(list(set(CLAIM_TYPE_CODES) - {TXO_TYPES['channel']}))) &
(TXO.c.spent_height == 0) &
(TXO.c.claim_hash.notin_(select(Claim.c.claim_hash)))
)
)
)
if initial_sync:
channel_update_member_count_sql = (
Claim.update()
.where(Claim.c.claim_type == TXO_TYPES['channel'])
.values(
signed_claim_count=channel_content_count_calc(Claim.alias('content')),
signed_support_count=channel_content_count_calc(Support),
)
)
result = p.ctx.execute(channel_update_member_count_sql)
done += result.rowcount
p.step(done)
if initial_sync:
return
select_stale_claims = select_claims.where(
(TXO.c.txo_type.in_(CLAIM_TYPE_CODES)) &
(TXO.c.spent_height == 0) &
(TXO.c.txo_hash.notin_(select(Claim.c.txo_hash)))
)
loader = p.ctx.get_bulk_loader() loader = p.ctx.get_bulk_loader()
for txo in rows_to_txos(p.ctx.fetchall(select_missing_supports)): for row in p.ctx.connection.execution_options(stream_results=True).execute(select_stale_claims):
loader.add_support(txo) txo = Output(
changes.claims_with_changed_supports.add(txo.claim_hash) amount=row['amount'],
loader.save() script=OutputScript(row['src']),
tx_ref=TXRefImmutable.from_hash(row['tx_hash'], row['height']),
position=row['txo_position'],
)
loader.update_claim(
txo, channel_url=row['channel_url'], timestamp=row['timestamp'],
staked_support_amount=int(row['staked_support_amount']),
staked_support_count=int(row['staked_support_count']),
signature=row['signature'], signature_digest=row['signature_digest'],
channel_public_key=row['channel_public_key'],
)
if len(loader.update_claims) >= 1000:
done += loader.flush(Claim)
p.step(done)
done += loader.flush(Claim)
p.step(done)
for takeover in chain.db.sync_get_takeovers(start_height=blocks_added[0], end_height=blocks_added[-1]):
@sync_step(Event.CLAIM_TRIE, step_size=100, ongoing_sync=True)
def process_takeovers(starting_height: int, ending_height: int, chain, p=None):
p.start(chain.db.sync_get_takeover_count(start_height=starting_height, end_height=ending_height))
for offset in range(starting_height, ending_height + 1):
for takeover in chain.db.sync_get_takeovers(start_height=offset, end_height=offset):
update_claims = ( update_claims = (
Claim.update() Claim.update()
.where(Claim.c.normalized == normalize_name(takeover['name'].decode())) .where(Claim.c.normalized == takeover['normalized'])
.values( .values(
is_controlling=case( is_controlling=case(
[(Claim.c.claim_hash == takeover['claim_hash'], True)], [(Claim.c.claim_hash == takeover['claim_hash'], True)],
@@ -338,84 +444,75 @@ def process_takeovers(starting_height: int, ending_height: int, chain, p=None):
activation_height=least(Claim.c.activation_height, takeover['height']), activation_height=least(Claim.c.activation_height, takeover['height']),
) )
) )
p.ctx.execute(update_claims) result = p.ctx.execute(update_claims)
p.step(1) done += result.rowcount
@sync_step(Event.CLAIM_META, initial_sync=True, ongoing_sync=True)
def process_claim_metadata(starting_height: int, ending_height: int, chain, p=None):
channel = Claim.alias('channel')
stream = Claim.alias('stream')
p.start(chain.db.sync_get_claim_metadata_count(start_height=starting_height, end_height=ending_height))
claim_update_sql = (
Claim.update().where(Claim.c.claim_hash == bindparam('claim_hash_'))
.values(
canonical_url=case([(
((Claim.c.canonical_url == None) & (Claim.c.channel_hash != None)),
select(channel.c.short_url).select_from(channel)
.where(channel.c.claim_hash == Claim.c.channel_hash)
.scalar_subquery() + '/' + bindparam('short_url_')
)], else_=Claim.c.canonical_url),
staked_support_amount=staked_support_amount_calc,
staked_support_count=staked_support_count_calc,
signed_claim_count=case([(
(Claim.c.claim_type == TXO_TYPES['channel']),
channel_content_count_calc(stream)
)], else_=0),
signed_support_count=case([(
(Claim.c.claim_type == TXO_TYPES['channel']),
channel_content_count_calc(Support)
)], else_=0),
)
)
done, step_size = 0, 500
for offset in range(starting_height, ending_height + 1, step_size):
claims = chain.db.sync_get_claim_metadata(
start_height=offset, end_height=min(offset + step_size, ending_height)
)
if claims:
p.ctx.execute(claim_update_sql, claims)
done += len(claims)
p.step(done) p.step(done)
channel_update_member_count_sql = (
def signature_validation(d: dict, row: dict, public_key) -> dict: Claim.update()
d['is_signature_valid'] = False .where(
if Output.is_signature_valid(bytes(row['signature']), bytes(row['signature_digest']), public_key): (Claim.c.claim_type == TXO_TYPES['channel']) &
d['is_signature_valid'] = True Claim.c.claim_hash.in_(select(TXO.c.channel_hash).where(condition_changed_channel_content))
return d ).values(
signed_claim_count=channel_content_count_calc(Claim.alias('content')),
signed_support_count=channel_content_count_calc(Support),
@sync_step(Event.CLAIM_SIGN, initial_sync=True, ongoing_sync=True)
def process_claim_signatures(changes: ClaimChanges, p=None):
p.start(get_unvalidated_signable_count(p.ctx, Claim))
claim_updates = []
sql = select_unvalidated_signables(
Claim, Claim.c.claim_hash, include_urls=True, include_previous=changes is not None
) )
steps = 0
for claim in p.ctx.execute(sql):
claim_updates.append(
signature_validation({
'pk': claim['claim_hash'],
'canonical_url': claim['channel_url'] + '/' + claim['claim_url']
}, claim, claim['public_key'])
) )
if changes is not None: p.ctx.execute(channel_update_member_count_sql)
changes.channels_with_changed_content.add(claim['channel_hash'])
if claim['previous_channel_hash']: claim_update_supports_sql = (
changes.channels_with_changed_content.add(claim['previous_channel_hash']) Claim.update()
if len(claim_updates) > 1000: .where(Claim.c.claim_hash.in_(select(TXO.c.claim_hash).where(condition_changed_stakes)))
p.ctx.execute(Claim.update().where(Claim.c.claim_hash == bindparam('pk')), claim_updates) .values(
steps += len(claim_updates) staked_support_amount=(
p.step(steps) staked_support_amount_calc
claim_updates.clear() .where(support.c.claim_hash == Claim.c.claim_hash)
if claim_updates: .scalar_subquery())
p.ctx.execute(Claim.update().where(Claim.c.claim_hash == bindparam('pk')), claim_updates) ,
staked_support_count=(
staked_support_count_calc
.where(support.c.claim_hash == Claim.c.claim_hash)
.scalar_subquery()
),
)
)
result = p.ctx.execute(claim_update_supports_sql)
p.step(done+result.rowcount)
@sync_step(Event.SUPPORT_SIGN, initial_sync=True, ongoing_sync=True) @event_emitter("blockchain.sync.supports", "supports")
def process_support_signatures(changes: ClaimChanges, p=None): def process_supports(starting_height: int, blocks_added: Optional[Tuple[int, int]], p: ProgressContext):
done = 0
to_be_deleted = p.ctx.fetchtotal(condition_spent_supports)
to_be_inserted = p.ctx.fetchtotal(condition_missing_supports)
p.start(to_be_deleted + to_be_inserted)
sql = Support.delete().where(condition_spent_supports)
deleted = p.ctx.execute(sql)
assert to_be_deleted == deleted.rowcount,\
f"Expected supports to be deleted {to_be_deleted}, actual deleted {deleted.rowcount}."
done += deleted.rowcount
p.step(done)
if p.ctx.is_postgres:
insert_supports = partial(p.ctx.pg_copy, Support)
else:
insert_supports = partial(p.ctx.execute, Support.insert())
loader = p.ctx.get_bulk_loader()
inserted_supports, supports = 0, []
for txo in rows_to_txos(p.ctx.fetchall(select_missing_supports)):
supports.append(loader.support_to_row(txo))
if len(supports) >= 50_000:
insert_supports(supports)
inserted_supports += len(supports)
supports = []
if supports:
insert_supports(supports)
inserted_supports += len(supports)
assert to_be_inserted == inserted_supports, \
f"Expected supports to be inserted {to_be_inserted}, actual inserted {inserted_supports}."
return
p.start(get_unvalidated_signable_count(p.ctx, Support)) p.start(get_unvalidated_signable_count(p.ctx, Support))
support_updates = [] support_updates = []
for support in p.ctx.execute(select_unvalidated_signables(Support, Support.c.txo_hash)): for support in p.ctx.execute(select_unvalidated_signables(Support, Support.c.txo_hash)):
@@ -430,32 +527,3 @@ def process_support_signatures(changes: ClaimChanges, p=None):
support_updates.clear() support_updates.clear()
if support_updates: if support_updates:
p.ctx.execute(Support.update().where(Support.c.txo_hash == bindparam('pk')), support_updates) p.ctx.execute(Support.update().where(Support.c.txo_hash == bindparam('pk')), support_updates)
@sync_step(Event.STAKE_CALC, ongoing_sync=True)
def process_stake_calc(changes: ClaimChanges, p=None):
p.start(len(changes.claims_with_changed_supports))
sql = (
Claim.update()
.where((Claim.c.claim_hash.in_(changes.claims_with_changed_supports)))
.values(
staked_support_amount=staked_support_amount_calc,
staked_support_count=staked_support_count_calc,
)
)
p.ctx.execute(sql)
@sync_step(Event.CLAIM_CHAN, ongoing_sync=True)
def process_channel_content(changes: ClaimChanges, p=None):
p.start(len(changes.channels_with_changed_content))
stream = Claim.alias('stream')
sql = (
Claim.update()
.where((Claim.c.claim_hash.in_(changes.channels_with_changed_content)))
.values(
signed_claim_count=channel_content_count_calc(stream),
signed_support_count=channel_content_count_calc(Support),
)
)
p.ctx.execute(sql)

View file

@@ -9,12 +9,7 @@ from lbry.event import BroadcastSubscription
from lbry.service.base import Sync, BlockEvent from lbry.service.base import Sync, BlockEvent
from lbry.blockchain.lbrycrd import Lbrycrd from lbry.blockchain.lbrycrd import Lbrycrd
from .steps import ( from . import steps
SYNC_STEPS,
process_block_file,
process_block_filters,
process_metadata,
)
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@@ -35,9 +30,7 @@ class BlockchainSync(Sync):
# created while sync was running; therefore, run a second sync # created while sync was running; therefore, run a second sync
# after first one finishes to possibly sync those new blocks. # after first one finishes to possibly sync those new blocks.
# run advance as a task so that it can be stop()'ed if necessary. # run advance as a task so that it can be stop()'ed if necessary.
self.advance_loop_task = asyncio.create_task( self.advance_loop_task = asyncio.create_task(self.advance())
self.advance(await self.db.needs_initial_sync())
)
await self.advance_loop_task await self.advance_loop_task
self.chain.subscribe() self.chain.subscribe()
self.advance_loop_task = asyncio.create_task(self.advance_loop()) self.advance_loop_task = asyncio.create_task(self.advance_loop())
@@ -58,7 +51,7 @@ class BlockchainSync(Sync):
self.db.executor, f, *args self.db.executor, f, *args
) )
async def load_blocks(self, sync_steps: int, initial_sync: bool) -> Optional[Tuple[int, int]]: async def load_blocks(self) -> Optional[Tuple[int, int]]:
tasks = [] tasks = []
starting_height, ending_height = None, await self.chain.db.get_best_height() starting_height, ending_height = None, await self.chain.db.get_best_height()
tx_count = block_count = 0 tx_count = block_count = 0
@@ -81,16 +74,15 @@ class BlockchainSync(Sync):
our_best_file_height+1 if starting_height is None else starting_height, our_best_file_height+1 our_best_file_height+1 if starting_height is None else starting_height, our_best_file_height+1
) )
tasks.append(self.run( tasks.append(self.run(
process_block_file, chain_file['file_number'], our_best_file_height+1, initial_sync steps.process_block_file, chain_file['file_number'], our_best_file_height+1
)) ))
if not tasks: if not tasks:
return return
await self._on_progress_controller.add({ await self._on_progress_controller.add({
"event": Event.START.label, # pylint: disable=no-member "event": "blockchain.sync.start",
"data": { "data": {
"starting_height": starting_height, "starting_height": starting_height,
"ending_height": ending_height, "ending_height": ending_height,
"sync_steps": sync_steps,
"files": len(tasks), "files": len(tasks),
"blocks": block_count, "blocks": block_count,
"txs": tx_count, "txs": tx_count,
@@ -111,27 +103,22 @@ class BlockchainSync(Sync):
best_height_processed = max(f.result() for f in done) best_height_processed = max(f.result() for f in done)
return starting_height, best_height_processed return starting_height, best_height_processed
def get_steps(self, initial_sync: bool): async def advance(self):
if initial_sync: starting_height = await self.db.get_best_block_height()
sync_steps = SYNC_STEPS['initial_sync'].copy() blocks_added = await self.load_blocks()
else: process_block_filters = (
sync_steps = SYNC_STEPS['ongoing_sync'].copy() self.run(steps.process_block_filters)
if not self.conf.spv_address_filters: if blocks_added and self.conf.spv_address_filters else asyncio.sleep(0)
sync_steps.remove(Event.BLOCK_FILTER.label) # pylint: disable=no-member )
return sync_steps if blocks_added:
await self.run(steps.process_spends, blocks_added[0] == 0)
async def advance(self, initial_sync=False): await asyncio.wait([
sync_steps = self.get_steps(initial_sync) process_block_filters,
heights = await self.load_blocks(sync_steps, initial_sync) self.run(steps.process_claims, starting_height, blocks_added),
if heights: self.run(steps.process_supports, starting_height, blocks_added),
starting_height, ending_height = heights ])
await self.run(process_metadata, starting_height, ending_height, initial_sync) if blocks_added:
if self.conf.spv_address_filters: await self._on_block_controller.add(BlockEvent(blocks_added[-1]))
await self.run(process_block_filters)
await self._on_block_controller.add(BlockEvent(ending_height))
self.db.message_queue.put((
Event.COMPLETE.value, os.getpid(), len(sync_steps), len(sync_steps)
))
async def advance_loop(self): async def advance_loop(self):
while True: while True:

View file

@@ -196,7 +196,7 @@ class Input(InputOutput):
class Output(InputOutput): class Output(InputOutput):
__slots__ = ( __slots__ = (
'amount', 'script', 'is_internal_transfer', 'is_spent', 'is_my_output', 'is_my_input', 'amount', 'script', 'is_internal_transfer', 'spent_height', 'is_my_output', 'is_my_input',
'channel', 'private_key', 'meta', 'sent_supports', 'sent_tips', 'received_tips', 'channel', 'private_key', 'meta', 'sent_supports', 'sent_tips', 'received_tips',
'purchase', 'purchased_claim', 'purchase_receipt', 'purchase', 'purchased_claim', 'purchase_receipt',
'reposted_claim', 'claims', '_signable' 'reposted_claim', 'claims', '_signable'
@@ -204,7 +204,7 @@ class Output(InputOutput):
def __init__(self, amount: int, script: OutputScript, def __init__(self, amount: int, script: OutputScript,
tx_ref: TXRef = None, position: int = None, tx_ref: TXRef = None, position: int = None,
is_internal_transfer: Optional[bool] = None, is_spent: Optional[bool] = None, is_internal_transfer: Optional[bool] = None, spent_height: Optional[bool] = None,
is_my_output: Optional[bool] = None, is_my_input: Optional[bool] = None, is_my_output: Optional[bool] = None, is_my_input: Optional[bool] = None,
sent_supports: Optional[int] = None, sent_tips: Optional[int] = None, sent_supports: Optional[int] = None, sent_tips: Optional[int] = None,
received_tips: Optional[int] = None, received_tips: Optional[int] = None,
@@ -214,7 +214,7 @@ class Output(InputOutput):
self.amount = amount self.amount = amount
self.script = script self.script = script
self.is_internal_transfer = is_internal_transfer self.is_internal_transfer = is_internal_transfer
self.is_spent = is_spent self.spent_height = spent_height
self.is_my_output = is_my_output self.is_my_output = is_my_output
self.is_my_input = is_my_input self.is_my_input = is_my_input
self.sent_supports = sent_supports self.sent_supports = sent_supports
@@ -233,7 +233,7 @@ class Output(InputOutput):
def update_annotations(self, annotated: 'Output'): def update_annotations(self, annotated: 'Output'):
if annotated is None: if annotated is None:
self.is_internal_transfer = None self.is_internal_transfer = None
self.is_spent = None self.spent_height = None
self.is_my_output = None self.is_my_output = None
self.is_my_input = None self.is_my_input = None
self.sent_supports = None self.sent_supports = None
@@ -241,7 +241,7 @@ class Output(InputOutput):
self.received_tips = None self.received_tips = None
else: else:
self.is_internal_transfer = annotated.is_internal_transfer self.is_internal_transfer = annotated.is_internal_transfer
self.is_spent = annotated.is_spent self.spent_height = annotated.spent_height
self.is_my_output = annotated.is_my_output self.is_my_output = annotated.is_my_output
self.is_my_input = annotated.is_my_input self.is_my_input = annotated.is_my_input
self.sent_supports = annotated.sent_supports self.sent_supports = annotated.sent_supports
@@ -262,6 +262,11 @@ class Output(InputOutput):
def hash(self): def hash(self):
return self.ref.hash return self.ref.hash
@property
def is_spent(self):
if self.spent_height is not None:
return self.spent_height > 0
@property @property
def pubkey_hash(self): def pubkey_hash(self):
return self.script.values['pubkey_hash'] return self.script.values['pubkey_hash']
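In transaction.py the tri-state is_spent attribute becomes spent_height, and is_spent is now derived from it: None still reads as unknown, 0 as known-unspent, and any positive value as the height of the spending transaction (the diff's __init__ still annotates spent_height as Optional[bool], presumably carried over from the old flag). A tiny illustrative sketch of that accessor, assuming those semantics (ExampleOutput is hypothetical; the real Output also carries script, tx_ref and the other annotations):

    from typing import Optional

    class ExampleOutput:
        def __init__(self, amount: int, spent_height: Optional[int] = None):
            self.amount = amount
            # None: spent state unknown; 0: known unspent; >0: height it was spent at
            self.spent_height = spent_height

        @property
        def is_spent(self) -> Optional[bool]:
            if self.spent_height is not None:
                return self.spent_height > 0
            return None  # unknown, mirroring the old Optional[bool] flag

    print(ExampleOutput(10, spent_height=0).is_spent)    # False
    print(ExampleOutput(10, spent_height=500).is_spent)  # True
    print(ExampleOutput(10).is_spent)                     # None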

View file

@@ -112,7 +112,6 @@ class Advanced(Basic):
self.is_single_sync_bar = False self.is_single_sync_bar = False
self.single_bar_relative_steps = 0 self.single_bar_relative_steps = 0
self.last_stats = "" self.last_stats = ""
self.sync_steps = []
self.block_savers = 0 self.block_savers = 0
self.block_readers = 0 self.block_readers = 0
self.stderr = RedirectOutput('stderr') self.stderr = RedirectOutput('stderr')
@@ -123,7 +122,7 @@ class Advanced(Basic):
def stopping(self): def stopping(self):
super().stopping() super().stopping()
self.stderr.flush(self.bars['sync'].write, True) self.stderr.flush(self.bars['read'].write, True)
self.stderr.release() self.stderr.release()
def get_or_create_bar(self, name, desc, unit, total, leave=False, bar_format=None, postfix=None, position=None): def get_or_create_bar(self, name, desc, unit, total, leave=False, bar_format=None, postfix=None, position=None):
@@ -138,16 +137,12 @@ class Advanced(Basic):
def sync_start(self, d): def sync_start(self, d):
self.bars.clear() self.bars.clear()
self.sync_steps = d['sync_steps']
if d['ending_height']-d['starting_height'] > 0: if d['ending_height']-d['starting_height'] > 0:
label = f"sync {d['starting_height']:,d}-{d['ending_height']:,d}" label = f"sync {d['starting_height']:,d}-{d['ending_height']:,d}"
else: else:
label = f"sync {d['ending_height']:,d}" label = f"sync {d['ending_height']:,d}"
print(label)
self.last_stats = f"{d['txs']:,d} txs, {d['claims']:,d} claims and {d['supports']:,d} supports" self.last_stats = f"{d['txs']:,d} txs, {d['claims']:,d} claims and {d['supports']:,d} supports"
self.get_or_create_bar(
"sync", label, "tasks", len(d['sync_steps']), True,
"{l_bar}{bar}| {postfix[0]:<55}", (self.last_stats,)
)
self.get_or_create_bar("read", "├─ blocks read", "blocks", d['blocks'], True) self.get_or_create_bar("read", "├─ blocks read", "blocks", d['blocks'], True)
self.get_or_create_bar("save", "└─┬ txs saved", "txs", d['txs'], True) self.get_or_create_bar("save", "└─┬ txs saved", "txs", d['txs'], True)
@@ -155,16 +150,8 @@ class Advanced(Basic):
if e in ('blockchain.sync.block.read', 'blockchain.sync.block.save'): if e in ('blockchain.sync.block.read', 'blockchain.sync.block.save'):
self.update_block_bars(e, d) self.update_block_bars(e, d)
else: else:
self.update_steps_bar(e, d)
self.update_other_bars(e, d) self.update_other_bars(e, d)
def update_steps_bar(self, e, d):
sync_bar = self.bars['sync']
if d['step'] == d['total']:
sync_done = (self.sync_steps.index(e)+1)-sync_bar.last_print_n
sync_bar.postfix = (f'finished: {e}',)
sync_bar.update(sync_done)
def update_block_bars(self, event, d): def update_block_bars(self, event, d):
total_bar = self.bars[event[-4:]] total_bar = self.bars[event[-4:]]
if event.endswith("read") and self.block_readers == 0: if event.endswith("read") and self.block_readers == 0:
@@ -198,7 +185,6 @@ class Advanced(Basic):
total_bar.update(diff) total_bar.update(diff)
if total_bar.total == total_bar.last_print_n: if total_bar.total == total_bar.last_print_n:
self.update_steps_bar(event, {'step': 1, 'total': 1})
if total_bar.desc.endswith('txs saved'): if total_bar.desc.endswith('txs saved'):
total_bar.desc = "├─ txs saved" total_bar.desc = "├─ txs saved"
total_bar.refresh() total_bar.refresh()
@@ -221,7 +207,7 @@ class Advanced(Basic):
#bar.close() #bar.close()
def sync_complete(self): def sync_complete(self):
self.bars['sync'].postfix = (self.last_stats,) self.bars['read'].postfix = (self.last_stats,)
for bar in self.bars.values(): for bar in self.bars.values():
bar.close() bar.close()
@@ -229,10 +215,10 @@ class Advanced(Basic):
e, d = event['event'], event.get('data', {}) e, d = event['event'], event.get('data', {})
if e.endswith("sync.start"): if e.endswith("sync.start"):
self.sync_start(d) self.sync_start(d)
self.stderr.flush(self.bars['sync'].write) self.stderr.flush(self.bars['read'].write)
elif e.endswith("sync.complete"): elif e.endswith("sync.complete"):
self.stderr.flush(self.bars['sync'].write, True) self.stderr.flush(self.bars['read'].write, True)
self.sync_complete() self.sync_complete()
else: else:
self.stderr.flush(self.bars['sync'].write) self.stderr.flush(self.bars['read'].write)
self.update_progress(e, d) self.update_progress(e, d)

View file

@@ -196,11 +196,8 @@ class Database:
async def process_all_things_after_sync(self): async def process_all_things_after_sync(self):
return await self.run_in_executor(sync.process_all_things_after_sync) return await self.run_in_executor(sync.process_all_things_after_sync)
async def needs_initial_sync(self) -> bool: async def get_best_block_height(self) -> int:
return (await self.get_best_tx_height()) == -1 return await self.run_in_executor(q.get_best_block_height)
async def get_best_tx_height(self) -> int:
return await self.run_in_executor(q.get_best_tx_height)
async def get_best_block_height_for_file(self, file_number) -> int: async def get_best_block_height_for_file(self, file_number) -> int:
return await self.run_in_executor(q.get_best_block_height_for_file, file_number) return await self.run_in_executor(q.get_best_block_height_for_file, file_number)
@@ -307,7 +304,7 @@ class Database:
return txos return txos
async def get_utxos(self, **constraints) -> Result[Output]: async def get_utxos(self, **constraints) -> Result[Output]:
return await self.get_txos(is_spent=False, **constraints) return await self.get_txos(spent_height=0, **constraints)
async def get_supports(self, **constraints) -> Result[Output]: async def get_supports(self, **constraints) -> Result[Output]:
return await self.get_utxos(txo_type=TXO_TYPES['support'], **constraints) return await self.get_utxos(txo_type=TXO_TYPES['support'], **constraints)

View file

@@ -78,16 +78,15 @@ def execute_fetchall(sql):
return context().fetchall(text(sql)) return context().fetchall(text(sql))
def get_best_tx_height(): def get_best_block_height():
return context().fetchone( return context().fetchone(
select(func.coalesce(func.max(TX.c.height), -1).label('height')).select_from(TX) select(func.coalesce(func.max(Block.c.height), -1).label('height'))
)['height'] )['height']
def get_best_block_height_for_file(file_number): def get_best_block_height_for_file(file_number):
return context().fetchone( return context().fetchone(
select(func.coalesce(func.max(Block.c.height), -1).label('height')) select(func.coalesce(func.max(Block.c.height), -1).label('height'))
.select_from(Block)
.where(Block.c.file_number == file_number) .where(Block.c.file_number == file_number)
)['height'] )['height']
@@ -268,7 +267,7 @@ def get_transaction_count(**constraints):
BASE_SELECT_TXO_COLUMNS = [ BASE_SELECT_TXO_COLUMNS = [
TX.c.tx_hash, TX.c.raw, TX.c.height, TX.c.position.label('tx_position'), TX.c.tx_hash, TX.c.raw, TX.c.height, TX.c.position.label('tx_position'),
TX.c.is_verified, TX.c.timestamp, TX.c.is_verified, TX.c.timestamp,
TXO.c.txo_type, TXO.c.position.label('txo_position'), TXO.c.amount, TXO.c.is_spent, TXO.c.txo_type, TXO.c.position.label('txo_position'), TXO.c.amount, TXO.c.spent_height,
TXO.c.script_offset, TXO.c.script_length, TXO.c.script_offset, TXO.c.script_length,
] ]
@@ -357,7 +356,7 @@ def rows_to_txos(rows: List[dict], include_tx=True) -> List[Output]:
tx_ref=TXRefImmutable.from_hash(row['tx_hash'], row['height']), tx_ref=TXRefImmutable.from_hash(row['tx_hash'], row['height']),
position=row['txo_position'], position=row['txo_position'],
) )
txo.is_spent = bool(row['is_spent']) txo.spent_height = bool(row['spent_height'])
if 'is_my_input' in row: if 'is_my_input' in row:
txo.is_my_input = bool(row['is_my_input']) txo.is_my_input = bool(row['is_my_input'])
if 'is_my_output' in row: if 'is_my_output' in row:
@@ -437,7 +436,7 @@ def get_txos(no_tx=False, include_total=False, **constraints) -> Tuple[List[Outp
channels = { channels = {
txo.claim_hash: txo for txo in txo.claim_hash: txo for txo in
get_txos( get_txos(
txo_type=TXO_TYPES['channel'], is_spent=False, txo_type=TXO_TYPES['channel'], spent_height=0,
wallet_account_ids=wallet_account_ids, claim_hash__in=channel_hashes wallet_account_ids=wallet_account_ids, claim_hash__in=channel_hashes
)[0] )[0]
} }
@@ -471,7 +470,7 @@ def get_txo_sum(**constraints):
def get_balance(**constraints): def get_balance(**constraints):
return get_txo_sum(is_spent=False, **constraints) return get_txo_sum(spent_height=0, **constraints)
def get_report(account_ids): def get_report(account_ids):
@@ -793,7 +792,7 @@ def add_keys(account, chain, pubkeys):
def get_supports_summary(self, **constraints): def get_supports_summary(self, **constraints):
return get_txos( return get_txos(
txo_type=TXO_TYPES['support'], txo_type=TXO_TYPES['support'],
is_spent=False, is_my_output=True, spent_height=0, is_my_output=True,
include_is_my_input=True, include_is_my_input=True,
no_tx=True, no_tx=True,
**constraints **constraints

View file

@@ -1,14 +1,15 @@
import os import os
import time import time
import functools
from io import BytesIO from io import BytesIO
import multiprocessing as mp import multiprocessing as mp
from enum import Enum
from decimal import Decimal from decimal import Decimal
from typing import Dict, List, Optional, Tuple from typing import Dict, List, Optional, Tuple
from dataclasses import dataclass, field from dataclasses import dataclass, field
from contextvars import ContextVar from contextvars import ContextVar
from sqlalchemy import create_engine, inspect, bindparam from sqlalchemy import create_engine, inspect, bindparam, func, exists, case
from sqlalchemy.future import select
from sqlalchemy.engine import Engine, Connection from sqlalchemy.engine import Engine, Connection
from sqlalchemy.sql import Insert from sqlalchemy.sql import Insert
try: try:
@@ -93,6 +94,18 @@ class QueryContext:
rows = self.connection.execute(sql, *args).fetchall() rows = self.connection.execute(sql, *args).fetchall()
return [dict(row._mapping) for row in rows] return [dict(row._mapping) for row in rows]
def fetchtotal(self, condition):
sql = select(func.count('*').label('total')).where(condition)
return self.fetchone(sql)['total']
def fetchmax(self, column):
sql = select(func.max(column).label('max_result'))
return self.fetchone(sql)['max_result']
def has_records(self, table):
sql = select(exists([1], from_obj=table).label('result'))
return self.fetchone(sql)['result']
def insert_or_ignore(self, table): def insert_or_ignore(self, table):
if self.is_sqlite: if self.is_sqlite:
return table.insert().prefix_with("OR IGNORE") return table.insert().prefix_with("OR IGNORE")
@@ -178,68 +191,58 @@ def uninitialize():
_context.set(None) _context.set(None)
class ProgressUnit(Enum): class Event:
NONE = "", None _events: List['Event'] = []
TASKS = "tasks", None __slots__ = 'id', 'name', 'unit', 'step_size'
BLOCKS = "blocks", Block
TXS = "txs", TX
TXIS = "txis", TXI
CLAIMS = "claims", Claim
SUPPORTS = "supports", Support
def __new__(cls, value, table): def __init__(self, name: str, unit: str, step_size: int):
next_id = len(cls.__members__) + 1 self.name = name
obj = object.__new__(cls) self.unit = unit
obj._value_ = next_id self.step_size = step_size
obj.label = value
obj.table = table @classmethod
return obj def get_by_id(cls, event_id) -> 'Event':
return cls._events[event_id]
@classmethod
def get_by_name(cls, name) -> 'Event':
for event in cls._events:
if event.name == name:
return event
@classmethod
def add(cls, name: str, unit: str, step_size: int) -> 'Event':
assert cls.get_by_name(name) is None, f"Event {name} already exists."
event = cls(name, unit, step_size)
cls._events.append(event)
event.id = cls._events.index(event)
return event
class Event(Enum): def event_emitter(name: str, unit: str, step_size=1):
START = "blockchain.sync.start", ProgressUnit.BLOCKS event = Event.add(name, unit, step_size)
COMPLETE = "blockchain.sync.complete", ProgressUnit.TASKS
# full node specific sync events def wrapper(f):
BLOCK_READ = "blockchain.sync.block.read", ProgressUnit.BLOCKS @functools.wraps(f)
BLOCK_SAVE = "blockchain.sync.block.save", ProgressUnit.TXS def with_progress(*args, **kwargs):
BLOCK_FILTER = "blockchain.sync.block.filter", ProgressUnit.BLOCKS with progress(event, step_size=step_size) as p:
CLAIM_META = "blockchain.sync.claim.meta", ProgressUnit.CLAIMS return f(*args, **kwargs, p=p)
CLAIM_TRIE = "blockchain.sync.claim.trie", ProgressUnit.CLAIMS return with_progress
STAKE_CALC = "blockchain.sync.claim.stakes", ProgressUnit.CLAIMS
CLAIM_CHAN = "blockchain.sync.claim.channels", ProgressUnit.CLAIMS
CLAIM_SIGN = "blockchain.sync.claim.signatures", ProgressUnit.CLAIMS
SUPPORT_SIGN = "blockchain.sync.support.signatures", ProgressUnit.SUPPORTS
TRENDING_CALC = "blockchain.sync.trending", ProgressUnit.BLOCKS
# full node + light client sync events return wrapper
INPUT_UPDATE = "db.sync.input", ProgressUnit.TXIS
CLAIM_DELETE = "db.sync.claim.delete", ProgressUnit.CLAIMS
CLAIM_INSERT = "db.sync.claim.insert", ProgressUnit.CLAIMS
CLAIM_UPDATE = "db.sync.claim.update", ProgressUnit.CLAIMS
SUPPORT_DELETE = "db.sync.support.delete", ProgressUnit.SUPPORTS
SUPPORT_INSERT = "db.sync.support.insert", ProgressUnit.SUPPORTS
def __new__(cls, value, unit: ProgressUnit):
next_id = len(cls.__members__) + 1
obj = object.__new__(cls)
obj._value_ = next_id
obj.label = value
obj.unit = unit
return obj
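The Enum-based Event/ProgressUnit pair is replaced by a runtime registry plus the event_emitter decorator, which registers the event and injects a ProgressContext as the p keyword. A hypothetical step wired through it (event name and totals are illustrative; p.step() is the only ProgressContext method assumed here):

    @event_emitter("blockchain.sync.example", "rows", step_size=100)
    def process_rows(rows, p):
        # p is the ProgressContext opened by the decorator
        for done, row in enumerate(rows, start=1):
            ...           # do the real work for this row
            p.step(done)  # reported in "rows", throttled by step_size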
class ProgressPublisher(EventQueuePublisher): class ProgressPublisher(EventQueuePublisher):
def message_to_event(self, message): def message_to_event(self, message):
event = Event(message[0]) # pylint: disable=no-value-for-parameter event = Event.get_by_id(message[0])
d = { d = {
"event": event.label, "event": event.name,
"data": { "data": {
"pid": message[1], "pid": message[1],
"step": message[2], "step": message[2],
"total": message[3], "total": message[3],
"unit": event.unit.label "unit": event.unit
} }
} }
if len(message) > 4 and isinstance(message[4], dict): if len(message) > 4 and isinstance(message[4], dict):
@@ -294,12 +297,12 @@ class ProgressContext:
def get_event_args(self, done): def get_event_args(self, done):
if self.extra is not None: if self.extra is not None:
return self.event.value, self.ctx.pid, done, self.total, self.extra return self.event.id, self.ctx.pid, done, self.total, self.extra
return self.event.value, self.ctx.pid, done, self.total return self.event.id, self.ctx.pid, done, self.total
def progress(e: Event, step_size=1) -> ProgressContext: def progress(e: Event, step_size=1) -> ProgressContext:
ctx = context(e.label) ctx = context(e.name)
ctx.current_progress = ProgressContext(ctx, e, step_size=step_size) ctx.current_progress = ProgressContext(ctx, e, step_size=step_size)
return ctx.current_progress return ctx.current_progress
@@ -354,6 +357,7 @@ class BulkLoader:
'tx_hash': tx.hash, 'tx_hash': tx.hash,
'txo_hash': txi.txo_ref.hash, 'txo_hash': txi.txo_ref.hash,
'position': txi.position, 'position': txi.position,
'height': tx.height,
} }
def txo_to_row(self, tx: Transaction, txo: Output) -> dict: def txo_to_row(self, tx: Transaction, txo: Output) -> dict:
@@ -371,6 +375,8 @@ class BulkLoader:
'claim_hash': None, 'claim_hash': None,
'claim_name': None, 'claim_name': None,
'channel_hash': None, 'channel_hash': None,
'signature': None,
'signature_digest': None,
'public_key': None, 'public_key': None,
'public_key_hash': None 'public_key_hash': None
} }
@@ -380,6 +386,8 @@ class BulkLoader:
row['txo_type'] = TXO_TYPES.get(claim.claim_type, TXO_TYPES['stream']) row['txo_type'] = TXO_TYPES.get(claim.claim_type, TXO_TYPES['stream'])
if claim.is_signed: if claim.is_signed:
row['channel_hash'] = claim.signing_channel_hash row['channel_hash'] = claim.signing_channel_hash
row['signature'] = txo.get_encoded_signature()
row['signature_digest'] = txo.get_signature_digest(self.ledger)
if claim.is_channel: if claim.is_channel:
row['public_key'] = claim.channel.public_key_bytes row['public_key'] = claim.channel.public_key_bytes
row['public_key_hash'] = self.ledger.address_to_hash160( row['public_key_hash'] = self.ledger.address_to_hash160(
@@ -406,111 +414,89 @@ class BulkLoader:
pass pass
return row return row
def claim_to_rows(self, txo: Output) -> Tuple[dict, List]: def claim_to_rows(
try: self, txo: Output, timestamp: int, staked_support_amount: int, staked_support_count: int,
claim_name = txo.claim_name.replace('\x00', '') signature: bytes = None, signature_digest: bytes = None, channel_public_key: bytes = None,
normalized_name = txo.normalized_name ) -> Tuple[dict, List]:
except UnicodeDecodeError:
return {}, [] d = {
tx = txo.tx_ref.tx
claim_hash = txo.claim_hash
claim_record = {
'claim_hash': claim_hash,
'claim_id': txo.claim_id,
'claim_name': claim_name,
'normalized': normalized_name,
'address': txo.get_address(self.ledger),
'txo_hash': txo.ref.hash,
'amount': txo.amount,
'timestamp': tx.timestamp,
'release_time': None,
'height': tx.height,
'title': None,
'author': None,
'description': None,
'claim_type': None, 'claim_type': None,
'address': txo.get_address(self.ledger),
'txo_hash': txo.hash,
'amount': txo.amount,
'height': txo.tx_ref.height,
'timestamp': timestamp,
# support
'staked_amount': txo.amount + staked_support_amount,
'staked_support_amount': staked_support_amount,
'staked_support_count': staked_support_count,
# basic metadata
'title': None,
'description': None,
'author': None,
# streams # streams
'stream_type': None, 'stream_type': None,
'media_type': None, 'media_type': None,
'duration': None,
'release_time': None,
'fee_amount': 0, 'fee_amount': 0,
'fee_currency': None, 'fee_currency': None,
'duration': None,
# reposts # reposts
'reposted_claim_hash': None, 'reposted_claim_hash': None,
# signed claims # signed claims
'channel_hash': None, 'channel_hash': None,
'signature': None,
'signature_digest': None,
'is_signature_valid': None, 'is_signature_valid': None,
} }
try: claim = txo.can_decode_claim
claim = txo.claim if not claim:
except Exception: return d, []
#self.logger.exception(f"Could not parse claim protobuf for {tx.id}:{txo.position}.")
return claim_record, []
if claim.is_stream: if claim.is_stream:
claim_record['claim_type'] = TXO_TYPES['stream'] d['claim_type'] = TXO_TYPES['stream']
claim_record['media_type'] = claim.stream.source.media_type d['media_type'] = claim.stream.source.media_type
claim_record['stream_type'] = STREAM_TYPES[guess_stream_type(claim_record['media_type'])] d['stream_type'] = STREAM_TYPES[guess_stream_type(d['media_type'])]
claim_record['title'] = claim.stream.title.replace('\x00', '') d['title'] = claim.stream.title.replace('\x00', '')
claim_record['description'] = claim.stream.description.replace('\x00', '') d['description'] = claim.stream.description.replace('\x00', '')
claim_record['author'] = claim.stream.author.replace('\x00', '') d['author'] = claim.stream.author.replace('\x00', '')
if claim.stream.video and claim.stream.video.duration: if claim.stream.video and claim.stream.video.duration:
claim_record['duration'] = claim.stream.video.duration d['duration'] = claim.stream.video.duration
if claim.stream.audio and claim.stream.audio.duration: if claim.stream.audio and claim.stream.audio.duration:
claim_record['duration'] = claim.stream.audio.duration d['duration'] = claim.stream.audio.duration
if claim.stream.release_time: if claim.stream.release_time:
claim_record['release_time'] = claim.stream.release_time d['release_time'] = claim.stream.release_time
if claim.stream.has_fee: if claim.stream.has_fee:
fee = claim.stream.fee fee = claim.stream.fee
if isinstance(fee.currency, str):
claim_record['fee_currency'] = fee.currency.lower()
if isinstance(fee.amount, Decimal): if isinstance(fee.amount, Decimal):
claim_record['fee_amount'] = int(fee.amount*1000) d['fee_amount'] = int(fee.amount*1000)
if isinstance(fee.currency, str):
d['fee_currency'] = fee.currency.lower()
elif claim.is_repost: elif claim.is_repost:
claim_record['claim_type'] = TXO_TYPES['repost'] d['claim_type'] = TXO_TYPES['repost']
claim_record['reposted_claim_hash'] = claim.repost.reference.claim_hash d['reposted_claim_hash'] = claim.repost.reference.claim_hash
elif claim.is_channel: elif claim.is_channel:
claim_record['claim_type'] = TXO_TYPES['channel'] d['claim_type'] = TXO_TYPES['channel']
if claim.is_signed: if claim.is_signed:
claim_record['channel_hash'] = claim.signing_channel_hash d['channel_hash'] = claim.signing_channel_hash
claim_record['signature'] = txo.get_encoded_signature() d['is_signature_valid'] = Output.is_signature_valid(
claim_record['signature_digest'] = txo.get_signature_digest(self.ledger) signature, signature_digest, channel_public_key
)
tags = []
if claim.message.tags:
claim_hash = txo.claim_hash
tags = [ tags = [
{'claim_hash': claim_hash, 'tag': tag} for tag in clean_tags(claim.message.tags) {'claim_hash': claim_hash, 'tag': tag}
for tag in clean_tags(claim.message.tags)
] ]
return claim_record, tags return d, tags
def add_block(self, block: Block, add_claims_supports: set = None): def support_to_row(self, txo):
self.blocks.append(self.block_to_row(block))
for tx in block.txs:
self.add_transaction(block.block_hash, tx, add_claims_supports)
return self
def add_transaction(self, block_hash: bytes, tx: Transaction, add_claims_supports: set = None):
self.txs.append(self.tx_to_row(block_hash, tx))
for txi in tx.inputs:
if txi.coinbase is None:
self.txis.append(self.txi_to_row(tx, txi))
for txo in tx.outputs:
self.txos.append(self.txo_to_row(tx, txo))
if add_claims_supports:
if txo.is_support and txo.hash in add_claims_supports:
self.add_support(txo)
elif txo.is_claim and txo.hash in add_claims_supports:
self.add_claim(txo)
return self
def add_support(self, txo: Output):
tx = txo.tx_ref.tx tx = txo.tx_ref.tx
claim_hash = txo.claim_hash d = {
support_record = {
'txo_hash': txo.ref.hash, 'txo_hash': txo.ref.hash,
'claim_hash': claim_hash, 'claim_hash': txo.claim_hash,
'address': txo.get_address(self.ledger), 'address': txo.get_address(self.ledger),
'amount': txo.amount, 'amount': txo.amount,
'height': tx.height, 'height': tx.height,
@@ -519,55 +505,94 @@ class BulkLoader:
'signature': None, 'signature': None,
'signature_digest': None, 'signature_digest': None,
} }
self.supports.append(support_record)
support = txo.can_decode_support support = txo.can_decode_support
if support: if support:
support_record['emoji'] = support.emoji d['emoji'] = support.emoji
if support.is_signed: if support.is_signed:
support_record['channel_hash'] = support.signing_channel_hash d['channel_hash'] = support.signing_channel_hash
support_record['signature'] = txo.get_encoded_signature() d['signature'] = txo.get_encoded_signature()
support_record['signature_digest'] = txo.get_signature_digest(None) d['signature_digest'] = txo.get_signature_digest(None)
return d
def add_claim(self, txo: Output): def add_block(self, block: Block):
claim, tags = self.claim_to_rows(txo) self.blocks.append(self.block_to_row(block))
if claim: for tx in block.txs:
tx = txo.tx_ref.tx self.add_transaction(block.block_hash, tx)
if txo.script.is_claim_name: return self
claim['creation_height'] = tx.height
claim['creation_timestamp'] = tx.timestamp def add_transaction(self, block_hash: bytes, tx: Transaction):
self.txs.append(self.tx_to_row(block_hash, tx))
for txi in tx.inputs:
if txi.coinbase is None:
self.txis.append(self.txi_to_row(tx, txi))
for txo in tx.outputs:
self.txos.append(self.txo_to_row(tx, txo))
return self
def add_support(self, txo: Output):
self.supports.append(self.support_to_row(txo))
def add_claim(
self, txo: Output, short_url: str,
creation_height: int, activation_height: int, expiration_height: int,
takeover_height: int = None, channel_url: str = None, **extra):
try:
claim_name = txo.claim_name.replace('\x00', '')
normalized_name = txo.normalized_name
except UnicodeDecodeError:
return self
d, tags = self.claim_to_rows(txo, **extra)
d['claim_hash'] = txo.claim_hash
d['claim_id'] = txo.claim_id
d['claim_name'] = claim_name
d['normalized'] = normalized_name
d['short_url'] = short_url
d['creation_height'] = creation_height
d['activation_height'] = activation_height
d['expiration_height'] = expiration_height
d['takeover_height'] = takeover_height
d['is_controlling'] = takeover_height is not None
if d['is_signature_valid']:
d['canonical_url'] = channel_url + '/' + short_url
else: else:
claim['creation_height'] = None d['canonical_url'] = None
claim['creation_timestamp'] = None self.claims.append(d)
self.claims.append(claim)
self.tags.extend(tags) self.tags.extend(tags)
return self return self
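add_claim() no longer derives creation or activation data from the txo itself; the caller supplies chain-derived values, and the remaining **extra keywords are forwarded to claim_to_rows(). A hypothetical call site (every value below is illustrative; ledger and channel_pubkey are assumed to be in scope):

    loader.add_claim(
        txo, short_url='foo#a',
        creation_height=1000, activation_height=1000, expiration_height=1000 + 2102400,
        takeover_height=None, channel_url='@chan#b',
        # forwarded to claim_to_rows():
        timestamp=1593566400, staked_support_amount=0, staked_support_count=0,
        signature=txo.get_encoded_signature(),
        signature_digest=txo.get_signature_digest(ledger),
        channel_public_key=channel_pubkey,
    )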
def update_claim(self, txo: Output): def update_claim(self, txo: Output, channel_url: Optional[str], **extra):
claim, tags = self.claim_to_rows(txo) d, tags = self.claim_to_rows(txo, **extra)
if claim: d['pk'] = txo.claim_hash
claim['claim_hash_'] = claim.pop('claim_hash') d['channel_url'] = channel_url
self.update_claims.append(claim) d['set_canonical_url'] = d['is_signature_valid']
self.delete_tags.append({'claim_hash_': claim['claim_hash_']}) self.update_claims.append(d)
self.delete_tags.append({'pk': txo.claim_hash})
self.tags.extend(tags) self.tags.extend(tags)
return self return self
def save(self, batch_size=10000): def get_queries(self):
queries = ( return (
(Block.insert(), self.blocks), (Block.insert(), self.blocks),
(TX.insert(), self.txs), (TX.insert(), self.txs),
(TXO.insert(), self.txos), (TXO.insert(), self.txos),
(TXI.insert(), self.txis), (TXI.insert(), self.txis),
(Claim.insert(), self.claims), (Claim.insert(), self.claims),
(Tag.delete().where(Tag.c.claim_hash == bindparam('claim_hash_')), self.delete_tags), (Tag.delete().where(Tag.c.claim_hash == bindparam('pk')), self.delete_tags),
(Claim.update().where(Claim.c.claim_hash == bindparam('claim_hash_')), self.update_claims), (Claim.update().where(Claim.c.claim_hash == bindparam('pk')).values(
canonical_url=case([
(bindparam('set_canonical_url'), bindparam('channel_url') + '/' + Claim.c.short_url)
], else_=None)
), self.update_claims),
(Tag.insert(), self.tags), (Tag.insert(), self.tags),
(Support.insert(), self.supports), (Support.insert(), self.supports),
) )
def save(self, unit_table, batch_size=10000):
queries = self.get_queries()
p = self.ctx.current_progress p = self.ctx.current_progress
done = row_scale = 0 done = row_scale = 0
if p: if p:
unit_table = p.event.unit.table
progress_total, row_total = 0, sum(len(q[1]) for q in queries) progress_total, row_total = 0, sum(len(q[1]) for q in queries)
for sql, rows in queries: for sql, rows in queries:
if sql.table == unit_table: if sql.table == unit_table:
@@ -610,3 +635,18 @@ class BulkLoader:
if p: if p:
done += int(len(chunk_rows)/row_scale) done += int(len(chunk_rows)/row_scale)
p.step(done) p.step(done)
def flush(self, done_counter_table) -> int:
execute = self.ctx.connection.execute
done = 0
for sql, rows in self.get_queries():
if not rows:
continue
if self.ctx.is_postgres and isinstance(sql, Insert):
self.ctx.pg_copy(sql.table, rows)
else:
execute(sql, rows)
if sql.table == done_counter_table:
done += len(rows)
rows.clear()
return done
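Unlike save(), flush() executes whatever is currently queued, clears the buffers, and returns only the number of rows written to the table used as the progress counter. A sketch of incremental use (get_bulk_loader() as a QueryContext factory is an assumption):

    loader = ctx.get_bulk_loader()
    done = 0
    for block in blocks:
        loader.add_block(block)
        if len(loader.txos) >= 10_000:
            done += loader.flush(TXO)  # only TXO rows count towards `done`
    done += loader.flush(TXO)          # write out whatever is left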

View file

@@ -61,16 +61,20 @@ def set_input_addresses(ctx):
def update_spent_outputs(ctx): def update_spent_outputs(ctx):
# Update spent TXOs setting is_spent = True # Update spent TXOs, setting spent_height to the spending input's height
set_is_spent = ( set_spent_height = (
TXO.update() TXO.update()
.values({TXO.c.is_spent: True}) .values({
.where( TXO.c.spent_height: (
(TXO.c.is_spent == False) & select(TXI.c.height)
.where(TXI.c.txo_hash == TXO.c.txo_hash)
)
}).where(
(TXO.c.spent_height == 0) &
(TXO.c.txo_hash.in_(select(TXI.c.txo_hash))) (TXO.c.txo_hash.in_(select(TXI.c.txo_hash)))
) )
) )
ctx.execute(set_is_spent) ctx.execute(set_spent_height)
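The update now records the spending input's height through a correlated subquery instead of flipping a boolean. Roughly the statement it compiles to, shown as a comment on a sketch of the call (exact SQL differs slightly per dialect):

    # UPDATE txo
    #    SET spent_height = (SELECT txi.height FROM txi WHERE txi.txo_hash = txo.txo_hash)
    #  WHERE txo.spent_height = 0
    #    AND txo.txo_hash IN (SELECT txi.txo_hash FROM txi)
    update_spent_outputs(ctx)  # ctx is the active query context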
def condition_spent_claims(claim_type: list = None): def condition_spent_claims(claim_type: list = None):
@@ -84,14 +88,14 @@ def condition_spent_claims(claim_type: list = None):
else: else:
type_filter = TXO.c.txo_type.in_(CLAIM_TYPE_CODES) type_filter = TXO.c.txo_type.in_(CLAIM_TYPE_CODES)
return Claim.c.claim_hash.notin_( return Claim.c.claim_hash.notin_(
select(TXO.c.claim_hash).where(type_filter & (TXO.c.is_spent == False)) select(TXO.c.claim_hash).where(type_filter & (TXO.c.spent_height == 0))
) )
# find UTXOs that are claims and their claim_id is not in claim table, # find UTXOs that are claims and their claim_id is not in claim table,
# this means they need to be inserted # this means they need to be inserted
select_missing_claims = ( select_missing_claims = (
select_txos(txo_type__in=CLAIM_TYPE_CODES, is_spent=False, claim_id_not_in_claim_table=True) select_txos(txo_type__in=CLAIM_TYPE_CODES, spent_height=0, claim_id_not_in_claim_table=True)
) )
@@ -100,7 +104,7 @@ select_missing_claims = (
# all claim_ids should match between TXO and Claim table but txo_hashes will not match for # all claim_ids should match between TXO and Claim table but txo_hashes will not match for
# claims that are not up-to-date # claims that are not up-to-date
select_stale_claims = ( select_stale_claims = (
select_txos(txo_type__in=CLAIM_TYPE_CODES, is_spent=False, txo_id_not_in_claim_table=True) select_txos(txo_type__in=CLAIM_TYPE_CODES, spent_height=0, txo_id_not_in_claim_table=True)
) )
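select_txos() assembles these from keyword constraints; written directly against the tables, the two selects correspond roughly to the following sketch (the mapping of claim_id_not_in_claim_table / txo_id_not_in_claim_table to NOT IN subqueries is an assumption):

    missing_claims = select(TXO).where(
        TXO.c.txo_type.in_(CLAIM_TYPE_CODES) &
        (TXO.c.spent_height == 0) &
        TXO.c.claim_hash.notin_(select(Claim.c.claim_hash))
    )
    stale_claims = select(TXO).where(
        TXO.c.txo_type.in_(CLAIM_TYPE_CODES) &
        (TXO.c.spent_height == 0) &
        TXO.c.txo_hash.notin_(select(Claim.c.txo_hash))
    )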
@@ -108,12 +112,19 @@ condition_spent_supports = (
Support.c.txo_hash.notin_( Support.c.txo_hash.notin_(
select(TXO.c.txo_hash).where( select(TXO.c.txo_hash).where(
(TXO.c.txo_type == TXO_TYPES['support']) & (TXO.c.txo_type == TXO_TYPES['support']) &
(TXO.c.is_spent == False) (TXO.c.spent_height == 0)
) )
) )
) )
condition_missing_supports = (
(TXO.c.txo_type == TXO_TYPES['support']) &
(TXO.c.spent_height == 0) &
(TXO.c.txo_hash.notin_(select(Support.c.txo_hash)))
)
select_missing_supports = ( select_missing_supports = (
select_txos(txo_type=TXO_TYPES['support'], is_spent=False, txo_id_not_in_support_table=True) select_txos(txo_type=TXO_TYPES['support'], spent_height=0, txo_id_not_in_support_table=True)
) )

View file

@@ -1,9 +1,11 @@
# pylint: skip-file # pylint: skip-file
from sqlalchemy import ( from sqlalchemy import (
MetaData, Table, Column, ForeignKey, MetaData, Table, Column, ForeignKey, PrimaryKeyConstraint,
LargeBinary, Text, SmallInteger, Integer, BigInteger, Boolean LargeBinary, Text, SmallInteger, Integer, BigInteger, Boolean,
text
) )
from .constants import TXO_TYPES
SCHEMA_VERSION = '1.4' SCHEMA_VERSION = '1.4'
@@ -71,9 +73,9 @@ TXO = Table(
Column('position', SmallInteger), Column('position', SmallInteger),
Column('amount', BigInteger), Column('amount', BigInteger),
Column('height', Integer), Column('height', Integer),
Column('spent_height', Integer, server_default='0'),
Column('script_offset', Integer), Column('script_offset', Integer),
Column('script_length', Integer), Column('script_length', Integer),
Column('is_spent', Boolean, server_default='0'),
Column('is_reserved', Boolean, server_default='0'), Column('is_reserved', Boolean, server_default='0'),
# claims # claims
@@ -82,6 +84,8 @@ TXO = Table(
Column('claim_hash', LargeBinary, nullable=True), Column('claim_hash', LargeBinary, nullable=True),
Column('claim_name', Text, nullable=True), Column('claim_name', Text, nullable=True),
Column('channel_hash', LargeBinary, nullable=True), # claims in channel Column('channel_hash', LargeBinary, nullable=True), # claims in channel
Column('signature', LargeBinary, nullable=True),
Column('signature_digest', LargeBinary, nullable=True),
# channels # channels
Column('public_key', LargeBinary, nullable=True), Column('public_key', LargeBinary, nullable=True),
@@ -91,17 +95,36 @@ TXO = Table(
txo_join_account = TXO.join(AccountAddress, TXO.columns.address == AccountAddress.columns.address) txo_join_account = TXO.join(AccountAddress, TXO.columns.address == AccountAddress.columns.address)
def pg_add_txo_constraints_and_indexes(execute):
execute(text("ALTER TABLE txo ADD PRIMARY KEY (txo_hash);"))
execute(text(f"""
CREATE INDEX txo_channel_hash_w_height_desc_and_pub_key
ON txo (claim_hash, height desc) INCLUDE (public_key)
WHERE txo_type={TXO_TYPES['channel']};
"""))
execute(text(f"""
CREATE INDEX txo_unspent_supports
ON txo (claim_hash) INCLUDE (amount)
WHERE spent_height = 0 AND txo_type={TXO_TYPES['support']};
"""))
TXI = Table( TXI = Table(
'txi', metadata, 'txi', metadata,
Column('tx_hash', LargeBinary, ForeignKey(TX.columns.tx_hash)), Column('tx_hash', LargeBinary, ForeignKey(TX.columns.tx_hash)),
Column('txo_hash', LargeBinary, ForeignKey(TXO.columns.txo_hash), primary_key=True), Column('txo_hash', LargeBinary, ForeignKey(TXO.columns.txo_hash), primary_key=True),
Column('address', Text, nullable=True), Column('address', Text, nullable=True),
Column('position', SmallInteger), Column('position', SmallInteger),
Column('height', Integer),
) )
txi_join_account = TXI.join(AccountAddress, TXI.columns.address == AccountAddress.columns.address) txi_join_account = TXI.join(AccountAddress, TXI.columns.address == AccountAddress.columns.address)
def pg_add_txi_constraints_and_indexes(execute):
execute(text("ALTER TABLE txi ADD PRIMARY KEY (txo_hash);"))
Claim = Table( Claim = Table(
'claim', metadata, 'claim', metadata,
Column('claim_hash', LargeBinary, primary_key=True), Column('claim_hash', LargeBinary, primary_key=True),
@@ -111,19 +134,20 @@ Claim = Table(
Column('address', Text), Column('address', Text),
Column('txo_hash', LargeBinary, ForeignKey(TXO.columns.txo_hash)), Column('txo_hash', LargeBinary, ForeignKey(TXO.columns.txo_hash)),
Column('amount', BigInteger), Column('amount', BigInteger),
Column('staked_amount', BigInteger, server_default='0'), Column('staked_amount', BigInteger),
Column('timestamp', Integer), # last updated timestamp Column('timestamp', Integer), # last updated timestamp
Column('creation_timestamp', Integer), Column('creation_timestamp', Integer),
Column('release_time', Integer, nullable=True), Column('release_time', Integer, nullable=True),
Column('height', Integer), # last updated height Column('height', Integer), # last updated height
Column('creation_height', Integer), Column('creation_height', Integer),
Column('activation_height', Integer, nullable=True), Column('activation_height', Integer),
Column('expiration_height', Integer, nullable=True), Column('expiration_height', Integer),
Column('takeover_height', Integer, nullable=True), Column('takeover_height', Integer, nullable=True),
Column('is_controlling', Boolean, server_default='0'), Column('sync_height', Integer), # claim dynamic values up-to-date as of this height (e.g. staked_amount)
Column('is_controlling', Boolean),
# normalized#shortest-unique-claim_id # normalized#shortest-unique-claim_id
Column('short_url', Text, nullable=True), Column('short_url', Text),
# channel's-short_url/normalized#shortest-unique-claim_id-within-channel # channel's-short_url/normalized#shortest-unique-claim_id-within-channel
Column('canonical_url', Text, nullable=True), Column('canonical_url', Text, nullable=True),
@@ -152,8 +176,6 @@ Claim = Table(
# claims which are inside channels # claims which are inside channels
Column('channel_hash', LargeBinary, nullable=True), Column('channel_hash', LargeBinary, nullable=True),
Column('signature', LargeBinary, nullable=True),
Column('signature_digest', LargeBinary, nullable=True),
Column('is_signature_valid', Boolean, nullable=True), Column('is_signature_valid', Boolean, nullable=True),
Column('trending_group', BigInteger, server_default='0'), Column('trending_group', BigInteger, server_default='0'),

View file

@@ -10,6 +10,7 @@ from lbry.service import FullNode
from lbry.console import Advanced, Basic from lbry.console import Advanced, Basic
from lbry.conf import Config from lbry.conf import Config
from lbry.db.utils import chunk from lbry.db.utils import chunk
from lbry.db.query_context import Event
def cause_protobuf_stderr(): def cause_protobuf_stderr():
@@ -33,7 +34,6 @@ class Simulator:
self.starting_file = 0 self.starting_file = 0
self.processes = console.service.db.processes self.processes = console.service.db.processes
self.steps = []
self.txs = 0 self.txs = 0
self.claims = 0 self.claims = 0
self.supports = 0 self.supports = 0
@@ -46,7 +46,6 @@ class Simulator:
async def advance(self, initial_sync: bool, ending_height: int, files: List[int], txs: int): async def advance(self, initial_sync: bool, ending_height: int, files: List[int], txs: int):
self.ending_height = ending_height self.ending_height = ending_height
self.steps = self.sync.get_steps(initial_sync)
self.txs = txs self.txs = txs
self.claims = int(txs/4) self.claims = int(txs/4)
self.supports = int(txs/2) self.supports = int(txs/2)
@@ -57,7 +56,6 @@ class Simulator:
"ending_height": ending_height, "ending_height": ending_height,
"files": len(files), "files": len(files),
"blocks": self.blocks, "blocks": self.blocks,
"sync_steps": self.steps,
"txs": self.txs, "txs": self.txs,
"claims": self.claims, "claims": self.claims,
"supports": self.supports, "supports": self.supports,
@@ -78,14 +76,14 @@ class Simulator:
txs_synced += txs txs_synced += txs
tasks.append(self.sync_block_file(file, blocks, txs)) tasks.append(self.sync_block_file(file, blocks, txs))
await asyncio.wait(tasks) await asyncio.wait(tasks)
for step in self.steps: for step in Event._events:
if step in ("blockchain.sync.block.read", "blockchain.sync.block.save"): if step.name in ("blockchain.sync.block.read", "blockchain.sync.block.save"):
continue continue
await getattr(self, step.replace('.', '_'))() await getattr(self, step.name.replace('.', '_'))()
await self.progress.add({ #await self.progress.add({
"event": "blockchain.sync.complete", # "event": "blockchain.sync.complete",
"data": {"step": len(self.steps), "total": len(self.steps), "unit": "tasks"} # "data": {"step": len(self.steps), "total": len(self.steps), "unit": "tasks"}
}) #})
self.ending_height = ending_height+1 self.ending_height = ending_height+1
self.starting_height = self.ending_height self.starting_height = self.ending_height
@@ -123,50 +121,26 @@ class Simulator:
await asyncio.sleep(delay) await asyncio.sleep(delay)
await self.progress.add({"event": event, "data": {"step": steps, "total": steps, "unit": unit}}) await self.progress.add({"event": event, "data": {"step": steps, "total": steps, "unit": unit}})
async def db_sync_input(self): async def blockchain_sync_block_filters(self):
await self.generate_steps("db.sync.input", 2, "txis") await self.generate_steps("blockchain.sync.block.filters", 5, "blocks")
async def db_sync_claim_delete(self): async def blockchain_sync_spends(self):
await self.generate_steps("db.sync.claim.delete", 1, "claims") await self.generate_steps("blockchain.sync.spends", 5, "steps")
async def db_sync_claim_insert(self): async def blockchain_sync_claims(self):
await self.generate_steps("db.sync.claim.insert", 1, "claims")
async def db_sync_claim_update(self):
await self.generate_steps("db.sync.claim.update", 0, "claims")
async def db_sync_support_delete(self):
await self.generate_steps("db.sync.support.delete", 1, "supports")
async def db_sync_support_insert(self):
await self.generate_steps("db.sync.support.insert", 1, "supports")
async def blockchain_sync_claim_trie(self):
await self.generate_steps("blockchain.sync.claim.trie", 1, "claims")
async def blockchain_sync_claim_meta(self):
for i in range(0, self.claims, 1_000): for i in range(0, self.claims, 1_000):
await self.progress.add({ await self.progress.add({
"event": "blockchain.sync.claim.meta", "event": "blockchain.sync.claims",
"data": {"step": i, "total": self.claims, "unit": "claims"} "data": {"step": i, "total": self.claims, "unit": "claims"}
}) })
await asyncio.sleep(0.1) await asyncio.sleep(0.1)
await self.progress.add({ await self.progress.add({
"event": "blockchain.sync.claim.meta", "event": "blockchain.sync.claims",
"data": {"step": self.claims, "total": self.claims, "unit": "claims"} "data": {"step": self.claims, "total": self.claims, "unit": "claims"}
}) })
async def blockchain_sync_claim_signatures(self): async def blockchain_sync_supports(self):
await self.generate_steps("blockchain.sync.claim.signatures", self.claims, "claims", 0.5, 1000) await self.generate_steps("blockchain.sync.supports", 5, "supports")
async def blockchain_sync_support_signatures(self):
await self.generate_steps("blockchain.sync.support.signatures", self.supports, "supports", 0.5, 1000)
async def blockchain_sync_claim_stakes(self):
await self.generate_steps("blockchain.sync.claim.stakes", 1, "claims", 0.5)
async def blockchain_sync_claim_channels(self):
await self.generate_steps("blockchain.sync.claim.channels", 0, "supports", 0.5)
async def main(): async def main():

View file

@@ -469,7 +469,7 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase):
self.sync.on_progress.listen(events.append) self.sync.on_progress.listen(events.append)
# initial_sync = True # initial_sync = True
await self.sync.advance(True) await self.sync.advance()
await asyncio.sleep(1) # give it time to collect events await asyncio.sleep(1) # give it time to collect events
self.assertEqual( self.assertEqual(
events[0], { events[0], {
@@ -477,13 +477,6 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase):
"data": { "data": {
"starting_height": 0, "starting_height": 0,
"ending_height": 352, "ending_height": 352,
"sync_steps": [
"blockchain.sync.block.read",
"blockchain.sync.block.save",
"db.sync.input",
"blockchain.sync.claim.meta",
"blockchain.sync.claim.signatures",
"blockchain.sync.support.signatures"],
"files": 3, "files": 3,
"blocks": 353, "blocks": 353,
"txs": 544, "txs": 544,
@@ -508,33 +501,28 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase):
self.assertEventsAlmostEqual( self.assertEventsAlmostEqual(
self.extract_block_events('blockchain.sync.block.save', events), [ self.extract_block_events('blockchain.sync.block.save', events), [
[0, 0, 280], [0, 0, 280],
[0, 5, 280], [0, 19, 280],
[0, 13, 280], [0, 47, 280],
[0, 76, 280], [0, 267, 280],
[0, 79, 280], [0, 278, 280],
[0, 128, 280],
[0, 277, 280],
[0, 280, 280], [0, 280, 280],
[1, 0, 178], [1, 0, 178],
[1, 1, 178], [1, 6, 178],
[1, 4, 178], [1, 19, 178],
[1, 42, 178], [1, 167, 178],
[1, 44, 178], [1, 175, 178],
[1, 77, 178],
[1, 176, 178],
[1, 178, 178], [1, 178, 178],
[2, 0, 86], [2, 0, 86],
[2, 5, 86], [2, 11, 86],
[2, 9, 86], [2, 24, 86],
[2, 31, 86],
[2, 44, 86],
[2, 83, 86], [2, 83, 86],
[2, 85, 86],
[2, 86, 86], [2, 86, 86],
] ]
) )
# 3 - db.sync.input # 3 - blockchain.sync.spends
self.assertEventsAlmostEqual( self.assertEventsAlmostEqual(
self.extract_events('db.sync.input', events), [ self.extract_events('blockchain.sync.spends', events), [
[0, 9], [0, 9],
[1, 9], [1, 9],
[2, 9], [2, 9],
@@ -547,31 +535,26 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase):
[9, 9], [9, 9],
] ]
) )
# 4 - blockchain.sync.claim.meta # 4 - blockchain.sync.claims
self.assertEqual( self.assertEqual(
[[0, 3610], [3610, 3610]], [[0, 3610], [3610, 3610]],
self.extract_events('blockchain.sync.claim.meta', events) self.extract_events('blockchain.sync.claims', events)
) )
# 5 - blockchain.sync.claim.signatures # 5 - blockchain.sync.supports
self.assertEqual( self.assertEqual(
[[0, 0]], self.extract_events('blockchain.sync.claim.signatures', events) [[0, 2], [2, 2]],
) self.extract_events('blockchain.sync.supports', events)
# 6 - blockchain.sync.support.signatures
self.assertEqual(
[[0, 0]], self.extract_events('blockchain.sync.support.signatures', events)
)
# Complete
self.assertEqual(
[[6, 6]], self.extract_events('blockchain.sync.complete', events)
) )
# initial_sync = False & no new blocks # initial_sync = False & no new blocks
events.clear() events.clear()
await self.sync.advance() # should be no-op await self.sync.advance() # should be no-op
await asyncio.sleep(1) # give it time to collect events await asyncio.sleep(1) # give it time to collect events
self.assertListEqual([], events) self.assertEqual([[0, 0]], self.extract_events('blockchain.sync.claims', events))
self.assertEqual([[0, 0]], self.extract_events('blockchain.sync.supports', events))
# initial_sync = False # initial_sync = False
events.clear()
txid = await self.chain.claim_name('foo', 'beef', '0.01') txid = await self.chain.claim_name('foo', 'beef', '0.01')
await self.chain.generate(1) await self.chain.generate(1)
tx = Transaction(unhexlify(await self.chain.get_raw_transaction(txid))) tx = Transaction(unhexlify(await self.chain.get_raw_transaction(txid)))
@@ -586,21 +569,6 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase):
"data": { "data": {
"starting_height": 353, "starting_height": 353,
"ending_height": 354, "ending_height": 354,
"sync_steps": [
"blockchain.sync.block.read",
"blockchain.sync.block.save",
"db.sync.input",
"db.sync.claim.delete",
"db.sync.claim.insert",
"db.sync.claim.update",
"db.sync.support.delete",
"db.sync.support.insert",
"blockchain.sync.claim.trie",
"blockchain.sync.claim.meta",
"blockchain.sync.claim.signatures",
"blockchain.sync.support.signatures",
"blockchain.sync.claim.stakes",
"blockchain.sync.claim.channels"],
"files": 1, "files": 1,
"blocks": 2, "blocks": 2,
"txs": 4, "txs": 4,
@@ -625,38 +593,22 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase):
[2, 4, 4], [2, 4, 4],
] ]
) )
# 3 - db.sync.input # 3 - blockchain.sync.spends
self.assertEqual( self.assertEqual(
self.extract_events('db.sync.input', events), [ self.extract_events('blockchain.sync.spends', events), [
[0, 2], # started [0, 2],
[1, 2], # updating addresses on inputs [1, 2],
[2, 2], # connect spent outputs to their inputs [2, 2],
] ]
) )
# 4 - db.sync.claim.delete # 4 - blockchain.sync.claims
self.assertEqual([[0, 1], [1, 1]], self.extract_events('db.sync.claim.delete', events)) self.assertEqual(
# 5 - db.sync.claim.insert self.extract_events('blockchain.sync.claims', events), [
self.assertEqual([[0, 1], [1, 1]], self.extract_events('db.sync.claim.insert', events)) [0, 3], [1, 3], [2, 3], [3, 3]
# 6 - db.sync.claim.update ]
self.assertEqual([[0, 0]], self.extract_events('db.sync.claim.update', events)) )
# 7 - db.sync.support.delete # 5 - blockchain.sync.supports
self.assertEqual([[0, 1], [1, 1]], self.extract_events('db.sync.support.delete', events)) self.assertEqual([[0, 1], [1, 1]], self.extract_events('blockchain.sync.supports', events))
# 8 - db.sync.support.insert
self.assertEqual([[0, 1], [1, 1]], self.extract_events('db.sync.support.insert', events))
# 9 - blockchain.sync.claim.trie
self.assertEqual([[0, 1], [1, 1]], self.extract_events('blockchain.sync.claim.trie', events))
# 10 - blockchain.sync.claim.meta
self.assertEqual([[0, 1], [1, 1]], self.extract_events('blockchain.sync.claim.meta', events))
# 11 - blockchain.sync.claim.signatures
self.assertEqual([[0, 0]], self.extract_events("blockchain.sync.claim.signatures", events))
# 12 - blockchain.sync.support.signatures
self.assertEqual([[0, 0]], self.extract_events("blockchain.sync.support.signatures", events))
# 13 - blockchain.sync.claim.stakes
self.assertEqual([[0, 1], [1, 1]], self.extract_events("blockchain.sync.claim.stakes", events))
# 14 - blockchain.sync.claim.channels
self.assertEqual([[0, 0]], self.extract_events("blockchain.sync.claim.channels", events))
# Complete
self.assertEqual([[14, 14]], self.extract_events('blockchain.sync.complete', events))
class TestGeneralBlockchainSync(SyncingBlockchainTestCase): class TestGeneralBlockchainSync(SyncingBlockchainTestCase):