use LFU caches
parent a46343c84f
commit 0918299163
3 changed files with 14 additions and 13 deletions
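
The change swaps least-recently-used (LRU) caches for least-frequently-used (LFU) ones on the hot read paths. As a rough, standalone sketch of the behavioural difference (not the hub.common implementation): an LRU cache evicts whatever was touched longest ago, while an LFU cache evicts whatever has accumulated the fewest hits, so entries that are re-read constantly survive bursts of one-off lookups.

# Standalone sketch, assuming nothing from hub.common.
from collections import Counter, OrderedDict

class TinyLRU:
    def __init__(self, capacity):
        self.capacity, self.items = capacity, OrderedDict()

    def get(self, key):
        if key in self.items:
            self.items.move_to_end(key)  # mark as most recently used
            return self.items[key]

    def set(self, key, value):
        self.items[key] = value
        self.items.move_to_end(key)
        if len(self.items) > self.capacity:
            self.items.popitem(last=False)  # drop the least recently used entry

class TinyLFU:
    def __init__(self, capacity):
        self.capacity, self.items, self.hits = capacity, {}, Counter()

    def get(self, key):
        if key in self.items:
            self.hits[key] += 1
            return self.items[key]

    def set(self, key, value):
        if key not in self.items and len(self.items) >= self.capacity:
            coldest = min(self.items, key=lambda k: self.hits[k])
            self.items.pop(coldest)          # drop the least frequently used entry
            self.hits.pop(coldest, None)
        self.items[key] = value

lru, lfu = TinyLRU(2), TinyLFU(2)
for cache in (lru, lfu):
    cache.set('hot', 1)
    for _ in range(3):
        cache.get('hot')       # 'hot' is read repeatedly
    cache.set('a', 2)
    cache.set('b', 3)          # forces one eviction
print(lru.get('hot'))          # None: 'hot' was the least recently used key
print(lfu.get('hot'))          # 1: 'hot' has the highest hit count
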
@@ -18,7 +18,7 @@ from hub.schema.url import URL, normalize_name
 from hub.schema.claim import guess_stream_type
 from hub.schema.result import Censor
 from hub.scribe.transaction import TxInput
-from hub.common import hash_to_hex_str, LRUCacheWithMetrics
+from hub.common import hash_to_hex_str, LRUCacheWithMetrics, LFUCacheWithMetrics
 from hub.db.merkle import Merkle, MerkleCache, FastMerkleCacheItem
 from hub.db.common import ResolveResult, ExpandedResolveResult, DBError, UTXO
 from hub.db.prefixes import PendingActivationValue, ClaimTakeoverValue, ClaimToTXOValue, PrefixDB
@@ -90,9 +90,9 @@ class SecondaryDB:
         self.header_mc = MerkleCache(self.merkle, self.fs_block_hashes)

         # lru cache of tx_hash: (tx_bytes, tx_num, position, tx_height)
-        self.tx_cache = LRUCacheWithMetrics(tx_cache_size, metric_name='tx', namespace=NAMESPACE)
+        self.tx_cache = LFUCacheWithMetrics(tx_cache_size, metric_name='tx', namespace=NAMESPACE)
         # lru cache of block heights to merkle trees of the block tx hashes
-        self.merkle_cache = LRUCacheWithMetrics(merkle_cache_size, metric_name='merkle', namespace=NAMESPACE)
+        self.merkle_cache = LFUCacheWithMetrics(merkle_cache_size, metric_name='merkle', namespace=NAMESPACE)

         # these are only used if the cache_all_tx_hashes setting is on
         self.total_transactions: List[bytes] = []
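
The metric_name and namespace arguments above imply per-cache hit/miss counters exported through Prometheus (PROMETHEUS_NAMESPACE is imported elsewhere in this diff). A hedged sketch of what a frequency-evicting, metrics-wrapped cache might look like, assuming prometheus_client; the real hub.common.LFUCacheWithMetrics may differ in API, eviction details, and metric names:

# Sketch only; mirrors the constructor arguments used above, not hub's internals.
from collections import Counter as FrequencyCounter
from prometheus_client import Counter

class SketchLFUCacheWithMetrics:
    def __init__(self, capacity, metric_name, namespace='hub'):
        self.capacity = capacity
        self.items = {}
        self.frequency = FrequencyCounter()
        # one hit/miss counter pair per cache, e.g. hub_tx_cache_hit_count
        self.hits = Counter(f'{metric_name}_cache_hit_count', 'cache hits', namespace=namespace)
        self.misses = Counter(f'{metric_name}_cache_miss_count', 'cache misses', namespace=namespace)

    def get(self, key, default=None):
        if key in self.items:
            self.hits.inc()
            self.frequency[key] += 1
            return self.items[key]
        self.misses.inc()
        return default

    def set(self, key, value):
        if key not in self.items and len(self.items) >= self.capacity:
            coldest = min(self.items, key=lambda k: self.frequency[k])
            self.items.pop(coldest)
            self.frequency.pop(coldest, None)
        self.items[key] = value

tx_cache = SketchLFUCacheWithMetrics(2 ** 15, metric_name='tx', namespace='hub')
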
@@ -23,7 +23,7 @@ from hub.build_info import BUILD, COMMIT_HASH, DOCKER_TAG
 from hub.herald.search import SearchIndex
 from hub.common import sha256, hash_to_hex_str, hex_str_to_hash, HASHX_LEN, version_string, formatted_time, SIZE_BUCKETS
 from hub.common import protocol_version, RPCError, DaemonError, TaskGroup, HISTOGRAM_BUCKETS, asyncify_for_loop
-from hub.common import LRUCacheWithMetrics
+from hub.common import LRUCacheWithMetrics, LFUCacheWithMetrics
 from hub.herald.jsonrpc import JSONRPCAutoDetect, JSONRPCConnection, JSONRPCv2, JSONRPC
 from hub.herald.common import BatchRequest, ProtocolError, Request, Batch, Notification
 from hub.herald.framer import NewlineFramer
@@ -214,11 +214,11 @@ class SessionManager:
         )
         self.running = False
         # hashX: List[int]
-        self.hashX_raw_history_cache = LRUCacheWithMetrics(2 ** 16, metric_name='raw_history', namespace=NAMESPACE)
+        self.hashX_raw_history_cache = LFUCacheWithMetrics(2 ** 16, metric_name='raw_history', namespace=NAMESPACE)
         # hashX: List[CachedAddressHistoryItem]
-        self.hashX_history_cache = LRUCacheWithMetrics(2 ** 14, metric_name='full_history', namespace=NAMESPACE)
+        self.hashX_history_cache = LFUCacheWithMetrics(2 ** 14, metric_name='full_history', namespace=NAMESPACE)
         # tx_num: Tuple[txid, height]
-        self.history_tx_info_cache = LRUCacheWithMetrics(2 ** 19, metric_name='history_tx', namespace=NAMESPACE)
+        self.history_tx_info_cache = LFUCacheWithMetrics(2 ** 17, metric_name='history_tx', namespace=NAMESPACE)

     def clear_caches(self):
         self.resolve_cache.clear()
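
Besides the eviction-policy swap, the history_tx cache in SessionManager is also shrunk from 2 ** 19 to 2 ** 17 entries; the other two caches keep their sizes. In concrete entry counts:

# Entry budgets for the SessionManager caches touched above.
sizes = {'raw_history': 2 ** 16, 'full_history': 2 ** 14, 'history_tx': 2 ** 17}
for name, size in sizes.items():
    print(f'{name}: {size} entries')   # 65536, 16384, 131072 (history_tx was 524288)
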
@@ -246,8 +246,9 @@ class SessionManager:
         total_tx_nums = list(total_tx_nums)
         # collect the total new tx infos
         referenced_new_txs = {
-            tx_num: (CachedAddressHistoryItem(tx_hash=tx_hash[::-1].hex(), height=bisect_right(self.db.tx_counts, tx_num)))
-            for tx_num, tx_hash in zip(total_tx_nums, self.db._get_tx_hashes(total_tx_nums))
+            tx_num: (CachedAddressHistoryItem(
+                tx_hash=tx_hash[::-1].hex(), height=bisect_right(self.db.tx_counts, tx_num)
+            )) for tx_num, tx_hash in zip(total_tx_nums, self.db._get_tx_hashes(total_tx_nums))
         }
         # update the cached history lists
         get_referenced = referenced_new_txs.__getitem__
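
The comprehension above resolves each tx_num to a block height with bisect_right over self.db.tx_counts. Assuming tx_counts is the cumulative transaction count per block (an assumption here; it is not shown in this diff), the lookup behaves as follows:

# Hypothetical cumulative tx counts after blocks 0..3: 1, 3, 6, 10 transactions total.
from bisect import bisect_right

tx_counts = [1, 3, 6, 10]
assert bisect_right(tx_counts, 0) == 0   # tx_num 0 is in the genesis block
assert bisect_right(tx_counts, 3) == 2   # tx_num 3 is the first tx of block 2
assert bisect_right(tx_counts, 9) == 3   # tx_num 9 is the last tx of block 3
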
@@ -11,7 +11,7 @@ from hub import PROMETHEUS_NAMESPACE
 from hub.db.prefixes import ACTIVATED_SUPPORT_TXO_TYPE, ACTIVATED_CLAIM_TXO_TYPE
 from hub.db.prefixes import PendingActivationKey, PendingActivationValue, ClaimToTXOValue
 from hub.error.base import ChainError
-from hub.common import hash_to_hex_str, hash160, RPCError, HISTOGRAM_BUCKETS, StagedClaimtrieItem, sha256, LRUCache
+from hub.common import hash_to_hex_str, hash160, RPCError, HISTOGRAM_BUCKETS, StagedClaimtrieItem, sha256, LFUCache, LRUCache
 from hub.scribe.db import PrimaryDB
 from hub.scribe.daemon import LBCDaemon
 from hub.scribe.transaction import Tx, TxOutput, TxInput, Block
@@ -122,9 +122,9 @@ class BlockchainProcessorService(BlockchainService):
         self.pending_transaction_num_mapping: Dict[bytes, int] = {}
         self.pending_transactions: Dict[int, bytes] = {}

-        self.hashX_history_cache = LRUCache(max(100, env.hashX_history_cache_size))
-        self.hashX_full_cache = LRUCache(max(100, env.hashX_history_cache_size))
-        self.history_tx_info_cache = LRUCache(2 ** 16)
+        self.hashX_history_cache = LFUCache(max(100, env.hashX_history_cache_size))
+        self.hashX_full_cache = LFUCache(max(100, env.hashX_history_cache_size))
+        self.history_tx_info_cache = LFUCache(2 ** 17)

     def open_db(self):
         env = self.env
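
These processor-side caches use the plain (un-metered) LFUCache variant, sized from the hub environment with a floor of 100 entries. A tiny illustration of the floor, with env stubbed by a hypothetical stand-in:

# max(100, ...) keeps the cache usable even if the configured size is 0.
from types import SimpleNamespace

env = SimpleNamespace(hashX_history_cache_size=0)
assert max(100, env.hashX_history_cache_size) == 100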