Merge pull request #2836 from lbryio/wallet-server-lru-caches

Add LRU caches for blocks and block hashes in the wallet server
This commit is contained in:
Jack Robison 2020-03-01 15:19:28 -05:00 committed by GitHub
commit 0d427c9b90
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 19 additions and 3 deletions

View file

@@ -3,6 +3,7 @@ import itertools
import json
import time
from functools import wraps
from pylru import lrucache
import aiohttp
@@ -42,6 +43,8 @@ class Daemon:
self._height = None
self.available_rpcs = {}
self.connector = aiohttp.TCPConnector()
self._block_hash_cache = lrucache(1000000)
self._block_cache = lrucache(100000)
async def close(self):
if self.connector:
@@ -219,12 +222,25 @@ class Daemon:
async def block_hex_hashes(self, first, count):
    """Return the hex hashes of `count` blocks starting at height `first`.

    Heights more than 200 blocks below the daemon's cached tip are taken
    through the memoized path, since blocks that deep are effectively
    immutable; anything nearer the tip is always fetched from the daemon.
    """
    tip = self.cached_height() or 0
    if first + count < tip - 200:
        return await self._cached_block_hex_hashes(first, count)
    heights = ((height,) for height in range(first, first + count))
    return await self._send_vector('getblockhash', heights)
async def _cached_block_hex_hashes(self, first, count):
"""Return the hex hashes of count block starting at height first."""
cached = self._block_hash_cache.get((first, count))
if cached:
return cached
params_iterable = ((h, ) for h in range(first, first + count))
self._block_hash_cache[(first, count)] = await self._send_vector('getblockhash', params_iterable)
return self._block_hash_cache[(first, count)]
async def deserialised_block(self, hex_hash):
    """Return the deserialised (verbose JSON) block with the given hex
    hash, fetching it from the daemon via ``getblock`` on a cache miss.

    Fix: the unconditional ``return await self._send_single(...)`` left
    over from the pre-cache version made the caching below it unreachable
    dead code; it is removed so the cache is actually consulted.
    """
    block = self._block_cache.get(hex_hash)
    # None-check instead of truthiness so a falsy cached value could
    # never force a refetch; also avoids a second cache lookup.
    if block is None:
        block = await self._send_single('getblock', (hex_hash, True))
        self._block_cache[hex_hash] = block
    return block
async def raw_blocks(self, hex_hashes):
"""Return the raw binary blocks with the given hex hashes."""

View file

@@ -112,9 +112,9 @@ class MemPool:
"""Print regular logs of mempool stats."""
self.logger.info('beginning processing of daemon mempool. '
'This can take some time...')
start = time.time()
start = time.perf_counter()
await synchronized_event.wait()
elapsed = time.time() - start
elapsed = time.perf_counter() - start
self.logger.info(f'synced in {elapsed:.2f}s')
while True:
self.logger.info(f'{len(self.txs):,d} txs '