partial fix for https://github.com/lbryio/scribe/issues/2
parent bd00cb7d47
commit 356e613b24

2 changed files with 16 additions and 6 deletions
@@ -5,6 +5,8 @@ import typing
 import logging
 from collections import defaultdict
 from prometheus_client import Histogram
+
+import rocksdb.errors
 from scribe import PROMETHEUS_NAMESPACE
 from scribe.common import HISTOGRAM_BUCKETS
 from scribe.blockchain.transaction.deserializer import Deserializer

@@ -53,9 +55,17 @@ class MemPool:
 
     def refresh(self) -> typing.Set[bytes]:  # returns list of new touched hashXs
         prefix_db = self._db.prefix_db
-        new_mempool = {k.tx_hash: v.raw_tx for k, v in prefix_db.mempool_tx.iterate()}  # TODO: make this more efficient
-        self.raw_mempool.clear()
-        self.raw_mempool.update(new_mempool)
+        try:
+            new_mempool = {k.tx_hash: v.raw_tx for k, v in prefix_db.mempool_tx.iterate()}  # TODO: make this more efficient
+        except rocksdb.errors.RocksIOError as err:
+            # FIXME: why does this happen? can it happen elsewhere?
+            if err.args[0].startswith(b'IO error: No such file or directory: While open a file for random read:'):
+                self.logger.error("failed to process mempool, retrying later")
+                return set()
+            raise err
+        else:
+            self.raw_mempool.clear()
+            self.raw_mempool.update(new_mempool)
 
         # hashXs = self.hashXs  # hashX: [tx_hash, ...]
         touched_hashXs = set()
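
A minimal sketch of how a caller might consume the new failure mode, assuming a hypothetical periodic refresh loop; refresh_loop, notify_touched and poll_interval are illustrative names, not scribe APIs. Since refresh() now returns an empty set when the RocksDB read fails, a loop like this simply does nothing for that tick and retries on the next one:

    import asyncio

    async def refresh_loop(mempool, notify_touched, poll_interval=1.0):
        while True:
            touched_hashXs = mempool.refresh()        # set() when the rocksdb read failed
            if touched_hashXs:
                await notify_touched(touched_hashXs)  # only notify on real changes
            await asyncio.sleep(poll_interval)        # a failed refresh is retried next tick
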
@@ -191,9 +191,9 @@ class BlockchainReaderService(BlockchainService):
     def _detect_changes(self):
         try:
             self.db.prefix_db.try_catch_up_with_primary()
-        except:
-            self.log.exception('failed to update secondary db')
-            raise
+        except Exception as err:
+            self.log.exception('failed to update secondary db: %s', err)
+            raise err
         state = self.db.prefix_db.db_state.get()
         if not state or state.height <= 0:
             return
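
A standalone sketch (not scribe code) of one practical difference between the old bare `except:` and the new `except Exception`: on Python 3.8+ asyncio.CancelledError derives from BaseException, so the narrowed handler no longer logs task cancellation as a database failure before re-raising it.

    import asyncio

    async def detect_changes_like():
        try:
            await asyncio.sleep(3600)           # stand-in for the secondary-db call
        except Exception:
            print("logged as a db failure")     # a bare `except:` would land here on cancel
            raise

    async def main():
        task = asyncio.create_task(detect_changes_like())
        await asyncio.sleep(0)                  # let the task enter the try block
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            print("cancellation propagated without a spurious error log")

    asyncio.run(main())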