improve resolve caching

parent 6416d8ce9c
commit 99df418f1d

2 changed files with 50 additions and 29 deletions
@@ -236,6 +236,8 @@ class BlockProcessor:
         # self.search_cache = {}
         self.resolve_cache = LRUCache(2**16)
+        self.resolve_outputs_cache = LRUCache(2 ** 16)
         self.history_cache = {}
         self.status_server = StatusServer()
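The new resolve_outputs_cache mirrors the existing resolve_cache: a bounded LRU cache sized at 2**16 entries, so memory stays flat on a long-running hub while hot, repeated resolves are absorbed. A minimal sketch of what such a bounded cache provides (illustrative only, not the project's LRUCache class):

    from collections import OrderedDict

    class SimpleLRUCache:
        """Illustrative bounded cache: evicts the least recently used entry once full."""
        def __init__(self, capacity: int):
            self.capacity = capacity
            self._items = OrderedDict()

        def __contains__(self, key):
            return key in self._items

        def __getitem__(self, key):
            self._items.move_to_end(key)  # mark as most recently used
            return self._items[key]

        def __setitem__(self, key, value):
            self._items[key] = value
            self._items.move_to_end(key)
            if len(self._items) > self.capacity:
                self._items.popitem(last=False)  # drop the oldest entry

        def clear(self):
            self._items.clear()

    # resolve results keyed by URL, serialized outputs keyed by the sorted URL tuple
    resolve_cache = SimpleLRUCache(2 ** 16)
    resolve_outputs_cache = SimpleLRUCache(2 ** 16)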
@@ -1590,6 +1592,7 @@ class BlockProcessor:
         self.pending_transactions.clear()
         self.pending_support_amount_change.clear()
         self.resolve_cache.clear()
+        self.resolve_outputs_cache.clear()

     async def backup_block(self):
         assert len(self.db.prefix_db._op_stack) == 0
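Both caches are cleared alongside the other per-block state, since advancing or rolling back a block can change what any URL resolves to. A hedged sketch of the invalidation rule (the helper name is hypothetical, not part of BlockProcessor):

    def _invalidate_resolve_caches(self):
        # A chain state change can alter resolution results, so both layers are
        # dropped together; clearing only resolve_cache would let the serialized
        # responses kept in resolve_outputs_cache go stale.
        self.resolve_cache.clear()
        self.resolve_outputs_cache.clear()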
@@ -1016,10 +1016,16 @@ class LBRYElectrumX(SessionBase):
             return self.bp.resolve_cache[url]

     async def claimtrie_resolve(self, *urls):
+        sorted_urls = tuple(sorted(urls))
+        self.session_mgr.urls_to_resolve_count_metric.inc(len(sorted_urls))
+
+        def _cached_resolve():
             rows, extra = [], []
             for url in urls:
-            self.session_mgr.urls_to_resolve_count_metric.inc()
-            stream, channel, repost, reposted_channel = await self._cached_resolve_url(url)
+                if url not in self.bp.resolve_cache:
+                    self.bp.resolve_cache[url] = self.db._resolve(url)
+                stream, channel, repost, reposted_channel = self.bp.resolve_cache[url]
                 if isinstance(channel, ResolveCensoredError):
                     rows.append(channel)
                     extra.append(channel.censor_row)
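The loop now memoizes each URL's resolve result in bp.resolve_cache instead of awaiting _cached_resolve_url per URL, so the database lookup only happens on a cache miss. A minimal sketch of that memoization pattern, with illustrative names (resolve_many and _resolve are not part of the session code):

    def resolve_many(urls, cache, _resolve):
        """Resolve each URL, reusing cached results; _resolve is the expensive lookup."""
        results = []
        for url in urls:
            if url not in cache:
                cache[url] = _resolve(url)  # only hit the database on a miss
            results.append(cache[url])
        return results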
@@ -1044,7 +1050,19 @@ class LBRYElectrumX(SessionBase):
                 if reposted_channel:
                     extra.append(reposted_channel)
             # print("claimtrie resolve %i rows %i extrat" % (len(rows), len(extra)))
-        return Outputs.to_base64(rows, extra, 0, None, None)
+            self.bp.resolve_outputs_cache[sorted_urls] = serialized_outputs = Outputs.to_base64(
+                rows, extra, 0, None, None
+            )
+            return serialized_outputs
+
+        try:
+            if sorted_urls in self.bp.resolve_outputs_cache:
+                return self.bp.resolve_outputs_cache[sorted_urls]
+            else:
+                return await self.loop.run_in_executor(None, _cached_resolve)
+        finally:
+            self.session_mgr.resolved_url_count_metric.inc(len(sorted_urls))

     async def get_server_height(self):
         return self.bp.height
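Taken together, claimtrie_resolve now has two cache layers: the serialized response is keyed by the sorted URL tuple, so ('a', 'b') and ('b', 'a') share one entry, and on a miss the blocking per-URL resolution runs in a thread executor rather than on the event loop. A rough, simplified sketch of that flow (function and parameter names are illustrative, not the actual session API):

    import asyncio

    async def cached_claimtrie_resolve(urls, outputs_cache, build_outputs, metrics):
        """Two-layer cache: serve a prior serialized response if the same URL set
        was already resolved, otherwise build it off the event loop."""
        sorted_urls = tuple(sorted(urls))        # order-insensitive cache key
        metrics["requested"] += len(sorted_urls)
        try:
            if sorted_urls in outputs_cache:
                return outputs_cache[sorted_urls]           # whole-request hit
            loop = asyncio.get_running_loop()
            serialized = await loop.run_in_executor(None, build_outputs, urls)
            outputs_cache[sorted_urls] = serialized         # populate for next time
            return serialized
        finally:
            metrics["resolved"] += len(sorted_urls)

As in the diff, the finally block bumps the resolved-URL counter whether the answer came from the cache or from the executor.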