# lbry-sdk/lbry/blockchain/sync/supports.py

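"""
Blockchain sync steps for claim supports: bulk insertion of new supports,
deletion of abandoned supports, and PostgreSQL constraint/index and VACUUM
maintenance for the support table.
"""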
import logging
from typing import Tuple

from sqlalchemy import case, desc, text
from sqlalchemy.future import select

from lbry.db.tables import TX, TXO, Support, pg_add_support_constraints_and_indexes
from lbry.db.query_context import ProgressContext, event_emitter
from lbry.db.queries import row_to_txo
from lbry.db.constants import TXO_TYPES
from lbry.db.queries.txio import (
    minimum_txo_columns,
    where_unspent_txos, where_abandoned_supports,
    count_unspent_txos,
)
from .claims import make_label

log = logging.getLogger(__name__)


@event_emitter("blockchain.sync.supports.insert", "supports")
def supports_insert(blocks: Tuple[int, int], missing_in_supports_table: bool, p: ProgressContext):
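    """
    Add unspent support TXOs from the given block range to the support table.
    For signed supports, the signing channel's public key is looked up and
    passed along to the bulk loader. When `missing_in_supports_table` is set,
    only supports not already present in the table are selected.
    """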
    p.start(
        count_unspent_txos(
            TXO_TYPES['support'], blocks,
            missing_in_supports_table=missing_in_supports_table,
        ), progress_id=blocks[0], label=make_label("add supprt", blocks)
    )
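    # Select every matching support TXO together with the public key of its
    # signing channel, taken from the latest channel claim at or below the
    # support's height (NULL when the support is unsigned).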
    channel_txo = TXO.alias('channel_txo')
    select_supports = select(
        *minimum_txo_columns, TXO.c.claim_hash,
        TXO.c.signature, TXO.c.signature_digest,
        case([(
            TXO.c.channel_hash.isnot(None),
            select(channel_txo.c.public_key).select_from(channel_txo).where(
                (channel_txo.c.txo_type == TXO_TYPES['channel']) &
                (channel_txo.c.claim_hash == TXO.c.channel_hash) &
                (channel_txo.c.height <= TXO.c.height)
            ).order_by(desc(channel_txo.c.height)).limit(1).scalar_subquery()
        )]).label('channel_public_key'),
    ).select_from(
        TXO.join(TX)
    ).where(
        where_unspent_txos(
            TXO_TYPES['support'], blocks,
            missing_in_supports_table=missing_in_supports_table,
        )
    )
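    # Stream the rows and feed them to the bulk loader, flushing to the
    # database every 25,000 supports to keep memory usage bounded.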
    with p.ctx.connect_streaming() as c:
        loader = p.ctx.get_bulk_loader()
        for row in c.execute(select_supports):
            txo = row_to_txo(row)
            loader.add_support(
                txo,
                signature=row.signature,
                signature_digest=row.signature_digest,
                channel_public_key=row.channel_public_key
            )
            if len(loader.supports) >= 25_000:
                p.add(loader.flush(Support))
        p.add(loader.flush(Support))


@event_emitter("blockchain.sync.supports.delete", "supports")
def supports_delete(supports, p: ProgressContext):
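    """
    Delete abandoned supports from the support table; `supports` is the
    expected row count and is used only for progress reporting.
    """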
    p.start(supports, label="del supprt")
    deleted = p.ctx.execute(Support.delete().where(where_abandoned_supports()))
    p.step(deleted.rowcount)


@event_emitter("blockchain.sync.supports.indexes", "steps")
def supports_constraints_and_indexes(p: ProgressContext):
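    """
    On PostgreSQL, VACUUM ANALYZE the support table and apply the constraint
    and index statements from lbry.db.tables; on other backends only the
    progress steps are emitted.
    """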
    p.start(1 + len(pg_add_support_constraints_and_indexes))
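    # VACUUM ANALYZE refreshes planner statistics; it cannot run inside a
    # transaction, hence execute_notx().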
    if p.ctx.is_postgres:
        p.ctx.execute_notx(text("VACUUM ANALYZE support;"))
    p.step()
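    # The constraint/index statements are PostgreSQL-specific raw SQL; on other
    # backends the loop only advances the progress counter.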
    for constraint in pg_add_support_constraints_and_indexes:
        if p.ctx.is_postgres:
            p.ctx.execute(text(constraint))
        p.step()


@event_emitter("blockchain.sync.supports.vacuum", "steps")
def supports_vacuum(p: ProgressContext):
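    """On PostgreSQL, run VACUUM on the support table."""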
    p.start(1)
    if p.ctx.is_postgres:
        p.ctx.execute_notx(text("VACUUM support;"))
    p.step()