run vacuum after every sync

This commit is contained in:
Lex Berezhny 2020-07-14 13:26:32 -04:00
parent 86069b10ca
commit 1c29ae7204
6 changed files with 50 additions and 8 deletions

View file

@@ -128,13 +128,19 @@ def sync_spends(initial_sync: bool, p: ProgressContext):
p.ctx.execute(text(constraint)) p.ctx.execute(text(constraint))
p.step() p.step()
else: else:
p.start(2) p.start(3)
# 1. Update spent TXOs setting spent_height # 1. Update spent TXOs setting spent_height
update_spent_outputs(p.ctx) update_spent_outputs(p.ctx)
p.step() p.step()
# 2. Update TXIs to have the address of TXO they are spending. # 2. Update TXIs to have the address of TXO they are spending.
set_input_addresses(p.ctx) set_input_addresses(p.ctx)
p.step() p.step()
# 3. Update visibility map, which speeds up index-only scans.
if p.ctx.is_postgres:
with p.ctx.engine.connect() as c:
c.execute(text("COMMIT;"))
c.execute(text("VACUUM txo;"))
p.step()
@event_emitter("blockchain.sync.filter.generate", "blocks") @event_emitter("blockchain.sync.filter.generate", "blocks")

View file

@ -159,6 +159,19 @@ def claims_constraints_and_indexes(p: ProgressContext):
p.step() p.step()
@event_emitter("blockchain.sync.claims.vacuum", "steps")
def claims_vacuum(p: ProgressContext):
p.start(2)
with p.ctx.engine.connect() as c:
if p.ctx.is_postgres:
c.execute(text("COMMIT;"))
c.execute(text("VACUUM claim;"))
p.step()
if p.ctx.is_postgres:
c.execute(text("VACUUM tag;"))
p.step()
@event_emitter("blockchain.sync.claims.update", "claims") @event_emitter("blockchain.sync.claims.update", "claims")
def claims_update(blocks: Tuple[int, int], p: ProgressContext): def claims_update(blocks: Tuple[int, int], p: ProgressContext):
p.start( p.start(

View file

@@ -63,6 +63,13 @@ def supports_insert(blocks: Tuple[int, int], missing_in_supports_table: bool, p:
p.add(loader.flush(Support)) p.add(loader.flush(Support))
@event_emitter("blockchain.sync.supports.delete", "supports")
def supports_delete(supports, p: ProgressContext):
p.start(supports, label="del supprt")
deleted = p.ctx.execute(Support.delete().where(where_abandoned_supports()))
p.step(deleted.rowcount)
@event_emitter("blockchain.sync.supports.indexes", "steps") @event_emitter("blockchain.sync.supports.indexes", "steps")
def supports_constraints_and_indexes(p: ProgressContext): def supports_constraints_and_indexes(p: ProgressContext):
p.start(1 + len(pg_add_support_constraints_and_indexes)) p.start(1 + len(pg_add_support_constraints_and_indexes))
@@ -77,8 +84,11 @@ def supports_constraints_and_indexes(p: ProgressContext):
p.step() p.step()
@event_emitter("blockchain.sync.supports.delete", "supports") @event_emitter("blockchain.sync.supports.vacuum", "steps")
def supports_delete(supports, p: ProgressContext): def supports_vacuum(p: ProgressContext):
p.start(supports, label="del supprt") p.start(1)
deleted = p.ctx.execute(Support.delete().where(where_abandoned_supports())) with p.ctx.engine.connect() as c:
p.step(deleted.rowcount) if p.ctx.is_postgres:
c.execute(text("COMMIT;"))
c.execute(text("VACUUM support;"))
p.step()

View file

@@ -233,6 +233,8 @@ class BlockchainSync(Sync):
await self.db.run(claim_phase.update_stakes, blocks, claims_with_changed_supports) await self.db.run(claim_phase.update_stakes, blocks, claims_with_changed_supports)
if initial_sync: if initial_sync:
await self.db.run(claim_phase.claims_constraints_and_indexes) await self.db.run(claim_phase.claims_constraints_and_indexes)
else:
await self.db.run(claim_phase.claims_vacuum)
return initial_sync return initial_sync
async def sync_supports(self, blocks): async def sync_supports(self, blocks):
@@ -267,6 +269,8 @@ class BlockchainSync(Sync):
await self.db.run(support_phase.supports_delete, delete_supports) await self.db.run(support_phase.supports_delete, delete_supports)
if initial_sync: if initial_sync:
await self.db.run(support_phase.supports_constraints_and_indexes) await self.db.run(support_phase.supports_constraints_and_indexes)
else:
await self.db.run(support_phase.supports_vacuum)
async def sync_channel_stats(self, blocks, initial_sync): async def sync_channel_stats(self, blocks, initial_sync):
await self.db.run(claim_phase.update_channel_stats, blocks, initial_sync) await self.db.run(claim_phase.update_channel_stats, blocks, initial_sync)

View file

@@ -645,7 +645,6 @@ class BulkLoader:
def update_claim(self, txo: Output, **extra): def update_claim(self, txo: Output, **extra):
d, tags = self.claim_to_rows(txo, **extra) d, tags = self.claim_to_rows(txo, **extra)
d['pk'] = txo.claim_hash d['pk'] = txo.claim_hash
d['set_canonical_url'] = d['is_signature_valid']
self.update_claims.append(d) self.update_claims.append(d)
self.delete_tags.append({'pk': txo.claim_hash}) self.delete_tags.append({'pk': txo.claim_hash})
self.tags.extend(tags) self.tags.extend(tags)

View file

@@ -639,7 +639,7 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase):
) )
self.assertConsumingEvents( self.assertConsumingEvents(
events, "blockchain.sync.spends.main", ("steps",), [ events, "blockchain.sync.spends.main", ("steps",), [
(0, None, (2,), (1,), (2,)) (0, None, (3,), (1,), (2,), (3,))
] ]
) )
self.assertConsumingEvents( self.assertConsumingEvents(
@@ -668,6 +668,11 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase):
(0, None, (1,), (1,)), (0, None, (1,), (1,)),
] ]
) )
self.assertConsumingEvents(
events, "blockchain.sync.claims.vacuum", ("steps",), [
(0, None, (2,), (1,), (2,))
]
)
self.assertEqual( self.assertEqual(
events.pop(0), { events.pop(0), {
"event": "blockchain.sync.claims.main", "event": "blockchain.sync.claims.main",
@@ -690,6 +695,11 @@ class TestMultiBlockFileSyncing(BasicBlockchainTestCase):
(353, "add supprt 353- 354", (1,), (1,)), (353, "add supprt 353- 354", (1,), (1,)),
] ]
) )
self.assertConsumingEvents(
events, "blockchain.sync.supports.vacuum", ("steps",), [
(0, None, (1,), (1,))
]
)
self.assertEqual( self.assertEqual(
events.pop(0), { events.pop(0), {
"event": "blockchain.sync.supports.main", "event": "blockchain.sync.supports.main",