automatically batch large resolve requests

Jack Robison 2020-04-05 19:27:13 -04:00
parent 5b29894048
commit d615f6761a
2 changed files with 12 additions and 1 deletion

@@ -752,7 +752,11 @@ class Ledger(metaclass=LedgerRegistry):
 
     async def resolve(self, accounts, urls, **kwargs):
         resolve = partial(self.network.retriable_call, self.network.resolve)
-        txos = (await self._inflate_outputs(resolve(urls), accounts, **kwargs))[0]
+        urls_copy = list(urls)
+        txos = []
+        while urls_copy:
+            batch, urls_copy = urls_copy[:500], urls_copy[500:]
+            txos.extend((await self._inflate_outputs(resolve(batch), accounts, **kwargs))[0])
         assert len(urls) == len(txos), "Mismatch between urls requested for resolve and responses received."
         result = {}
         for url, txo in zip(urls, txos):
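
The change above replaces one oversized network call with sequential batches of 500 urls, so arbitrarily long url lists no longer produce a single request the server rejects or drops. A minimal standalone sketch of the same slicing pattern, outside the Ledger class (BATCH_SIZE, fetch_batch, and resolve_in_batches are illustrative stand-ins, not names from this commit):

import asyncio

BATCH_SIZE = 500  # fixed slice size, matching the 500 used in the commit

async def fetch_batch(batch):
    # stand-in for the real resolve network call
    await asyncio.sleep(0)
    return [f'resolved:{url}' for url in batch]

async def resolve_in_batches(urls):
    # one request per BATCH_SIZE slice; results keep the input order
    results, pending = [], list(urls)
    while pending:
        batch, pending = pending[:BATCH_SIZE], pending[BATCH_SIZE:]
        results.extend(await fetch_batch(batch))
    assert len(results) == len(urls)
    return results

if __name__ == '__main__':
    urls = [f'lbry://claim-{i}' for i in range(1200)]
    # 1200 results, fetched as two batches of 500 and one of 200
    print(len(asyncio.run(resolve_in_batches(urls))))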

@@ -1,6 +1,7 @@
 import os.path
 import tempfile
 import logging
+import asyncio
 from binascii import unhexlify
 from urllib.request import urlopen
@@ -79,6 +80,12 @@ class ClaimSearchCommand(ClaimTestCase):
         ] * 23828
         self.assertListEqual([], await self.claim_search(claim_ids=claim_ids))
+        # this should do nothing... if the resolve (which is retried) results in the server disconnecting,
+        # it kerplodes
+        await asyncio.wait_for(self.daemon.jsonrpc_resolve([
+            f'0000000000000000000000000000000000000000{i}' for i in range(30000)
+        ]), 30)
         # 23829 claim ids makes the request just large enough
         claim_ids = [
             '0000000000000000000000000000000000000000',