diff --git a/scribe/db/db.py b/scribe/db/db.py
index 60eb717..44a0b20 100644
--- a/scribe/db/db.py
+++ b/scribe/db/db.py
@@ -766,7 +766,7 @@ class HubDB:
 
     async def _read_tx_hashes(self):
         def _read_tx_hashes():
-            return list(self.prefix_db.tx_hash.iterate(start=(0,), stop=(self.db_tx_count + 1), include_key=False, fill_cache=False, deserialize_value=False))
+            return list(self.prefix_db.tx_hash.iterate(start=(0,), stop=(self.db_tx_count + 1,), include_key=False, fill_cache=False, deserialize_value=False))
 
         self.logger.info("loading tx hashes")
         self.total_transactions.clear()
diff --git a/scribe/reader/elastic_sync.py b/scribe/reader/elastic_sync.py
index ecec17a..e858683 100644
--- a/scribe/reader/elastic_sync.py
+++ b/scribe/reader/elastic_sync.py
@@ -220,7 +220,6 @@ class ElasticWriter(BaseBlockchainReader):
 
     def advance(self, height: int):
         super().advance(height)
-        touched_or_deleted = self.db.prefix_db.touched_or_deleted.get(height)
         for k, v in self.db.prefix_db.trending_notification.iterate((height,)):
             self._trending[k.claim_hash].append(TrendingNotification(k.height, v.previous_amount, v.new_amount))
 
@@ -236,8 +235,8 @@ class ElasticWriter(BaseBlockchainReader):
         self._advanced = True
 
     def unwind(self):
-        self.db.tx_counts.pop()
-        reverted_block_hash = self.db.coin.header_hash(self.db.headers.pop())
+        reverted_block_hash = self.db.coin.header_hash(self.db.headers[-1])
+        super().unwind()
         packed = self.db.prefix_db.undo.get(len(self.db.tx_counts), reverted_block_hash)
         touched_or_deleted = None
         claims_to_delete = []
diff --git a/scribe/reader/interface.py b/scribe/reader/interface.py
index 84841e8..df6a31a 100644
--- a/scribe/reader/interface.py
+++ b/scribe/reader/interface.py
@@ -141,12 +141,24 @@ class BaseBlockchainReader(BlockchainReaderInterface):
         tx_count = self.db.prefix_db.tx_count.get(height).tx_count
         assert tx_count not in self.db.tx_counts, f'boom {tx_count} in {len(self.db.tx_counts)} tx counts'
         assert len(self.db.tx_counts) == height, f"{len(self.db.tx_counts)} != {height}"
+        prev_count = self.db.tx_counts[-1]
         self.db.tx_counts.append(tx_count)
+        if self.db._cache_all_tx_hashes:
+            for tx_num in range(prev_count, tx_count):
+                tx_hash = self.db.prefix_db.tx_hash.get(tx_num).tx_hash
+                self.db.total_transactions.append(tx_hash)
+                self.db.tx_num_mapping[tx_hash] = tx_count
+            assert len(self.db.total_transactions) == tx_count, f"{len(self.db.total_transactions)} vs {tx_count}"
         self.db.headers.append(self.db.prefix_db.header.get(height, deserialize_value=False))
 
     def unwind(self):
-        self.db.tx_counts.pop()
+        prev_count = self.db.tx_counts.pop()
+        tx_count = self.db.tx_counts[-1]
         self.db.headers.pop()
+        if self.db._cache_all_tx_hashes:
+            for _ in range(prev_count - tx_count):
+                self.db.tx_num_mapping.pop(self.db.total_transactions.pop())
+            assert len(self.db.total_transactions) == tx_count, f"{len(self.db.total_transactions)} vs {tx_count}"
 
     def _start_cancellable(self, run, *args):
         _flag = asyncio.Event()
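
Note on the one-character db.py change: in Python, `(self.db_tx_count + 1)` without a trailing comma is just a parenthesized integer, so the old `stop` bound was an int rather than a one-element key tuple shaped like `start=(0,)`. A quick standalone sketch of the distinction (plain Python, no scribe imports; `db_tx_count` is a made-up value for illustration):

    db_tx_count = 41

    # Parentheses alone do not create a tuple; the trailing comma does.
    not_a_tuple = (db_tx_count + 1)     # int 42 -- what the old code passed as `stop`
    one_tuple = (db_tx_count + 1,)      # tuple (42,) -- same shape as `start=(0,)`

    assert isinstance(not_a_tuple, int) and not_a_tuple == 42
    assert isinstance(one_tuple, tuple) and one_tuple == (42,)

The other hunks are bookkeeping: `ElasticWriter.unwind` now reads `self.db.headers[-1]` to compute the reverted block hash before `super().unwind()` pops that header, and the interface.py changes keep the optional tx-hash cache (`total_transactions` / `tx_num_mapping`, enabled by `_cache_all_tx_hashes`) in step with `tx_counts` on both advance and unwind, asserting `len(total_transactions) == tx_count` after each step.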