expose cache size settings

This commit is contained in:
Jack Robison 2022-07-29 14:19:56 -04:00
parent 9df8f9c651
commit 34e3f9ecee
3 changed files with 16 additions and 8 deletions

View file

@@ -12,7 +12,8 @@ class ServerEnv(Env):
                  database_query_timeout=None, elastic_notifier_host=None, elastic_notifier_port=None,
                  blocking_channel_ids=None, filtering_channel_ids=None, peer_hubs=None, peer_announce=None,
                  index_address_status=None, address_history_cache_size=None, daemon_ca_path=None,
-                 merkle_cache_size=None, resolved_url_cache_size=None, tx_cache_size=None):
+                 merkle_cache_size=None, resolved_url_cache_size=None, tx_cache_size=None,
+                 history_tx_cache_size=None):
         super().__init__(db_dir, max_query_workers, chain, reorg_limit, prometheus_port, cache_all_tx_hashes,
                          cache_all_claim_txos, blocking_channel_ids, filtering_channel_ids, index_address_status)
         self.daemon_url = daemon_url if daemon_url is not None else self.required('DAEMON_URL')
@@ -59,6 +60,8 @@ class ServerEnv(Env):
             'RESOLVED_URL_CACHE_SIZE', 32768)
         self.tx_cache_size = tx_cache_size if tx_cache_size is not None else self.integer(
             'TX_CACHE_SIZE', 32768)
+        self.history_tx_cache_size = history_tx_cache_size if history_tx_cache_size is not None else \
+            self.integer('HISTORY_TX_CACHE_SIZE', 524288)

     @classmethod
     def contribute_to_arg_parser(cls, parser):
@@ -123,6 +126,11 @@ class ServerEnv(Env):
                             default=cls.integer('TX_CACHE_SIZE', 32768),
                             help="Size of the lru cache of transactions. "
                                  "Can be set in the env with 'TX_CACHE_SIZE'")
+        parser.add_argument('--history_tx_cache_size', type=int,
+                            default=cls.integer('HISTORY_TX_CACHE_SIZE', 524288),
+                            help="Size of the lfu cache of txids in transaction histories for addresses. "
+                                 "Can be set in the env with 'HISTORY_TX_CACHE_SIZE'")

     @classmethod
     def from_arg_parser(cls, args):
         return cls(
@@ -140,5 +148,5 @@ class ServerEnv(Env):
             elastic_notifier_port=args.elastic_notifier_port, index_address_status=args.index_address_statuses,
             address_history_cache_size=args.address_history_cache_size, daemon_ca_path=args.daemon_ca_path,
             merkle_cache_size=args.merkle_cache_size, resolved_url_cache_size=args.resolved_url_cache_size,
-            tx_cache_size=args.tx_cache_size
+            tx_cache_size=args.tx_cache_size, history_tx_cache_size=args.history_tx_cache_size
         )

View file

@@ -214,11 +214,11 @@ class SessionManager:
         )
         self.running = False
         # hashX: List[int]
-        self.hashX_raw_history_cache = LFUCacheWithMetrics(2 ** 16, metric_name='raw_history', namespace=NAMESPACE)
+        self.hashX_raw_history_cache = LFUCacheWithMetrics(env.hashX_history_cache_size, metric_name='raw_history', namespace=NAMESPACE)
         # hashX: List[CachedAddressHistoryItem]
-        self.hashX_history_cache = LFUCacheWithMetrics(2 ** 14, metric_name='full_history', namespace=NAMESPACE)
+        self.hashX_history_cache = LFUCacheWithMetrics(env.hashX_history_cache_size, metric_name='full_history', namespace=NAMESPACE)
         # tx_num: Tuple[txid, height]
-        self.history_tx_info_cache = LFUCacheWithMetrics(2 ** 17, metric_name='history_tx', namespace=NAMESPACE)
+        self.history_tx_info_cache = LFUCacheWithMetrics(env.history_tx_cache_size, metric_name='history_tx', namespace=NAMESPACE)

     def clear_caches(self):
         self.resolve_cache.clear()

View file

@@ -18,7 +18,7 @@ class BlockchainEnv(Env):
             if isinstance(rebuild_address_status_from_height, int) else -1
         self.daemon_ca_path = daemon_ca_path if daemon_ca_path else None
         self.history_tx_cache_size = history_tx_cache_size if history_tx_cache_size is not None else \
-            self.integer('HISTORY_TX_CACHE_SIZE', 262144)
+            self.integer('HISTORY_TX_CACHE_SIZE', 524288)

     @classmethod
     def contribute_to_arg_parser(cls, parser):
@@ -42,9 +42,9 @@ class BlockchainEnv(Env):
                             help="Rebuild address statuses, set to 0 to reindex all address statuses or provide a "
                                  "block height to start reindexing from. Defaults to -1 (off).")
         parser.add_argument('--history_tx_cache_size', type=int,
-                            default=cls.integer('HISTORY_TX_CACHE_SIZE', 262144),
+                            default=cls.integer('HISTORY_TX_CACHE_SIZE', 524288),
                             help="Size of the lfu cache of txids in transaction histories for addresses. "
-                                 "Can be set in the env with 'TX_CACHE_SIZE'")
+                                 "Can be set in the env with 'HISTORY_TX_CACHE_SIZE'")

     @classmethod
     def from_arg_parser(cls, args):