expose merkle_cache_size setting

This commit is contained in:
Jack Robison 2022-07-16 10:38:50 -04:00
parent 78e9d7b50b
commit a41abc870d
4 changed files with 14 additions and 7 deletions

View file

@@ -40,7 +40,7 @@ class SecondaryDB:
cache_all_claim_txos: bool = False, cache_all_tx_hashes: bool = False, cache_all_claim_txos: bool = False, cache_all_tx_hashes: bool = False,
blocking_channel_ids: List[str] = None, blocking_channel_ids: List[str] = None,
filtering_channel_ids: List[str] = None, executor: ThreadPoolExecutor = None, filtering_channel_ids: List[str] = None, executor: ThreadPoolExecutor = None,
index_address_status=False): index_address_status=False, merkle_cache_size=32768):
self.logger = logging.getLogger(__name__) self.logger = logging.getLogger(__name__)
self.coin = coin self.coin = coin
self._executor = executor self._executor = executor
@@ -92,7 +92,7 @@ class SecondaryDB:
# lru cache of tx_hash: (tx_bytes, tx_num, position, tx_height) # lru cache of tx_hash: (tx_bytes, tx_num, position, tx_height)
self.tx_cache = LRUCacheWithMetrics(2 ** 15, metric_name='tx', namespace=NAMESPACE) self.tx_cache = LRUCacheWithMetrics(2 ** 15, metric_name='tx', namespace=NAMESPACE)
# lru cache of block heights to merkle trees of the block tx hashes # lru cache of block heights to merkle trees of the block tx hashes
self.merkle_cache = LRUCacheWithMetrics(2 ** 14, metric_name='merkle', namespace=NAMESPACE) self.merkle_cache = LRUCacheWithMetrics(merkle_cache_size, metric_name='merkle', namespace=NAMESPACE)
# these are only used if the cache_all_tx_hashes setting is on # these are only used if the cache_all_tx_hashes setting is on
self.total_transactions: List[bytes] = [] self.total_transactions: List[bytes] = []

View file

@@ -9,10 +9,10 @@ class HeraldDB(SecondaryDB):
cache_all_claim_txos: bool = False, cache_all_tx_hashes: bool = False, cache_all_claim_txos: bool = False, cache_all_tx_hashes: bool = False,
blocking_channel_ids: List[str] = None, blocking_channel_ids: List[str] = None,
filtering_channel_ids: List[str] = None, executor: ThreadPoolExecutor = None, filtering_channel_ids: List[str] = None, executor: ThreadPoolExecutor = None,
index_address_status=False): index_address_status=False, merkle_cache_size=32768):
super().__init__(coin, db_dir, secondary_name, max_open_files, reorg_limit, cache_all_claim_txos, super().__init__(coin, db_dir, secondary_name, max_open_files, reorg_limit, cache_all_claim_txos,
cache_all_tx_hashes, blocking_channel_ids, filtering_channel_ids, executor, cache_all_tx_hashes, blocking_channel_ids, filtering_channel_ids, executor,
index_address_status) index_address_status, merkle_cache_size)
# self.headers = None # self.headers = None
# async def _read_headers(self): # async def _read_headers(self):

View file

@@ -11,7 +11,8 @@ class ServerEnv(Env):
session_timeout=None, drop_client=None, description=None, daily_fee=None, session_timeout=None, drop_client=None, description=None, daily_fee=None,
database_query_timeout=None, elastic_notifier_host=None, elastic_notifier_port=None, database_query_timeout=None, elastic_notifier_host=None, elastic_notifier_port=None,
blocking_channel_ids=None, filtering_channel_ids=None, peer_hubs=None, peer_announce=None, blocking_channel_ids=None, filtering_channel_ids=None, peer_hubs=None, peer_announce=None,
index_address_status=None, address_history_cache_size=None, daemon_ca_path=None): index_address_status=None, address_history_cache_size=None, daemon_ca_path=None,
merkle_cache_size=None):
super().__init__(db_dir, max_query_workers, chain, reorg_limit, prometheus_port, cache_all_tx_hashes, super().__init__(db_dir, max_query_workers, chain, reorg_limit, prometheus_port, cache_all_tx_hashes,
cache_all_claim_txos, blocking_channel_ids, filtering_channel_ids, index_address_status) cache_all_claim_txos, blocking_channel_ids, filtering_channel_ids, index_address_status)
self.daemon_url = daemon_url if daemon_url is not None else self.required('DAEMON_URL') self.daemon_url = daemon_url if daemon_url is not None else self.required('DAEMON_URL')
@@ -53,6 +54,7 @@ class ServerEnv(Env):
self.hashX_history_cache_size = address_history_cache_size if address_history_cache_size is not None \ self.hashX_history_cache_size = address_history_cache_size if address_history_cache_size is not None \
else self.integer('ADDRESS_HISTORY_CACHE_SIZE', 4096) else self.integer('ADDRESS_HISTORY_CACHE_SIZE', 4096)
self.daemon_ca_path = daemon_ca_path if daemon_ca_path else None self.daemon_ca_path = daemon_ca_path if daemon_ca_path else None
self.merkle_cache_size = merkle_cache_size if merkle_cache_size is not None else self.integer('MERKLE_CACHE_SIZE', 32768)
@classmethod @classmethod
def contribute_to_arg_parser(cls, parser): def contribute_to_arg_parser(cls, parser):
@@ -105,6 +107,10 @@ class ServerEnv(Env):
default=cls.integer('ADDRESS_HISTORY_CACHE_SIZE', 4096), default=cls.integer('ADDRESS_HISTORY_CACHE_SIZE', 4096),
help="Size of the lru cache of address histories. " help="Size of the lru cache of address histories. "
"Can be set in the env with 'ADDRESS_HISTORY_CACHE_SIZE'") "Can be set in the env with 'ADDRESS_HISTORY_CACHE_SIZE'")
parser.add_argument('--merkle_cache_size', type=int,
default=cls.integer('MERKLE_CACHE_SIZE', 32768),
help="Size of the lru cache of merkle trees for txs in blocks. "
"Can be set in the env with 'MERKLE_CACHE_SIZE'")
@classmethod @classmethod
def from_arg_parser(cls, args): def from_arg_parser(cls, args):
@@ -121,5 +127,6 @@ class ServerEnv(Env):
database_query_timeout=args.query_timeout_ms, blocking_channel_ids=args.blocking_channel_ids, database_query_timeout=args.query_timeout_ms, blocking_channel_ids=args.blocking_channel_ids,
filtering_channel_ids=args.filtering_channel_ids, elastic_notifier_host=args.elastic_notifier_host, filtering_channel_ids=args.filtering_channel_ids, elastic_notifier_host=args.elastic_notifier_host,
elastic_notifier_port=args.elastic_notifier_port, index_address_status=args.index_address_statuses, elastic_notifier_port=args.elastic_notifier_port, index_address_status=args.index_address_statuses,
address_history_cache_size=args.address_history_cache_size, daemon_ca_path=args.daemon_ca_path address_history_cache_size=args.address_history_cache_size, daemon_ca_path=args.daemon_ca_path,
merkle_cache_size=args.merkle_cache_size
) )

View file

@@ -42,7 +42,7 @@ class HubServerService(BlockchainReaderService):
env.coin, env.db_dir, self.secondary_name, -1, env.reorg_limit, env.cache_all_claim_txos, env.coin, env.db_dir, self.secondary_name, -1, env.reorg_limit, env.cache_all_claim_txos,
env.cache_all_tx_hashes, blocking_channel_ids=env.blocking_channel_ids, env.cache_all_tx_hashes, blocking_channel_ids=env.blocking_channel_ids,
filtering_channel_ids=env.filtering_channel_ids, executor=self._executor, filtering_channel_ids=env.filtering_channel_ids, executor=self._executor,
index_address_status=env.index_address_status index_address_status=env.index_address_status, merkle_cache_size=env.merkle_cache_size
) )
def clear_caches(self): def clear_caches(self):