forked from LBRYCommunity/lbry-sdk
update claim_search_performance.py, add new test cases shown by time_to_first_byte.py
This commit is contained in:
parent c876d891fa
commit 1a0680ead9
1 changed file with 77 additions and 46 deletions
claim_search_performance.py

@@ -1,5 +1,6 @@
 import os
 import time
+import textwrap
 import argparse
 import asyncio
 import logging
@@ -9,7 +10,7 @@ from lbry.wallet.ledger import MainNetLedger
 
 log = logging.getLogger(__name__)
 log.addHandler(logging.StreamHandler())
-log.setLevel(logging.INFO)
+log.setLevel(logging.CRITICAL)
 
 DEFAULT_ANY_TAGS = [
     'blockchain',
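Raising the level to CRITICAL silences the SDK's own log chatter so the script's printed report is all that reaches the terminal. A minimal sketch of the effect (the logger name and messages are made up for the demo):

    import logging

    log = logging.getLogger('claim_search_performance')  # hypothetical name
    log.addHandler(logging.StreamHandler())

    log.setLevel(logging.INFO)
    log.info("printed: INFO and above are emitted")

    log.setLevel(logging.CRITICAL)
    log.info("suppressed: below CRITICAL, nothing is written")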
@@ -68,43 +69,46 @@ def get_args(limit=20):
     args = []
     any_tags_combinations = [DEFAULT_ANY_TAGS, COMMON_AND_RARE, RARE_ANY_TAGS, COMMON_AND_RARE2, CITY_FIX, []]
     not_tags_combinations = [MATURE_TAGS, []]
-    for no_totals in [True]:
-        for offset in [0, 100]:
-            for any_tags in any_tags_combinations:
-                for not_tags in not_tags_combinations:
-                    for order_by in ORDER_BY:
-                        kw = {
-                            'order_by': order_by,
-                            'offset': offset,
-                            'limit': limit,
-                            'no_totals': no_totals
-                        }
-                        if not_tags:
-                            kw['not_tags'] = not_tags
-                        if any_tags:
-                            kw['any_tags'] = any_tags
-                        args.append(kw)
-    print(len(args), "argument combinations")
+    for no_fee in [False, True]:
+        for claim_type in [None, 'stream', 'channel']:
+            for no_totals in [True]:
+                for offset in [0, 100]:
+                    for any_tags in any_tags_combinations:
+                        for not_tags in not_tags_combinations:
+                            for order_by in ORDER_BY:
+                                kw = {
+                                    'order_by': order_by,
+                                    'offset': offset,
+                                    'limit': limit,
+                                    'no_totals': no_totals
+                                }
+                                if not_tags:
+                                    kw['not_tags'] = not_tags
+                                if any_tags:
+                                    kw['any_tags'] = any_tags
+                                if claim_type:
+                                    kw['claim_type'] = claim_type
+                                if no_fee:
+                                    kw['fee_amount'] = 0
+                                args.append(kw)
+    print(f"-- Trying {len(args)} argument combinations")
     return args
 
 
 def _search(kwargs):
-    start = time.time()
+    start = time.perf_counter()
+    error = None
     try:
         search_to_bytes(kwargs)
-        t = time.time() - start
-        return t, kwargs
     except Exception as err:
-        return -1, f"failed: error={str(type(err))}({str(err)})"
+        error = str(err)
+    return time.perf_counter() - start, kwargs, error
 
 
 async def search(executor, kwargs):
-    try:
-        return await asyncio.get_running_loop().run_in_executor(
-            executor, _search, kwargs
-        )
-    except Exception as err:
-        return f"failed (err={str(type(err))}({err}))- {kwargs}"
+    return await asyncio.get_running_loop().run_in_executor(
+        executor, _search, kwargs
+    )
 
 
 async def main(db_path, max_query_time):
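`_search` now times every query with `time.perf_counter()`, a monotonic clock suited to short intervals (unlike wall-clock `time.time()`), and always returns a uniform `(duration, kwargs, error)` triple instead of a `-1` sentinel or an error string with a different shape. A standalone sketch of the same pattern, with `run_query` standing in for `search_to_bytes`:

    import time

    def run_query(kwargs):  # hypothetical stand-in for search_to_bytes
        if kwargs.get('fail'):
            raise ValueError("bad constraint")

    def timed(kwargs):
        start = time.perf_counter()
        error = None
        try:
            run_query(kwargs)
        except Exception as err:
            error = str(err)
        # duration is measured whether or not the query failed
        return time.perf_counter() - start, kwargs, error

    print(timed({}))            # (duration, {}, None)
    print(timed({'fail': 1}))   # (duration, {'fail': 1}, 'bad constraint')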
@@ -115,23 +119,50 @@ async def main(db_path, max_query_time):
     tasks = [search(query_executor, constraints) for constraints in get_args()]
     try:
         results = await asyncio.gather(*tasks)
-        for ts, constraints in results:
-            if ts >= max_query_time:
-                sql = interpolate(*_get_claims("""
+        query_times = [
+            {
+                'sql': interpolate(*_get_claims("""
                     claimtrie.claim_hash as is_controlling,
                     claimtrie.last_take_over_height,
                     claim.claim_hash, claim.txo_hash,
                     claim.claims_in_channel,
                     claim.height, claim.creation_height,
                     claim.activation_height, claim.expiration_height,
                     claim.effective_amount, claim.support_amount,
                     claim.trending_group, claim.trending_mixed,
                     claim.trending_local, claim.trending_global,
                     claim.short_url, claim.canonical_url,
                     claim.channel_hash, channel.txo_hash AS channel_txo_hash,
                     channel.height AS channel_height, claim.signature_valid
-                """, **constraints))
-                print(f"Query took {int(ts * 1000)}ms\n{sql}")
+                """, **constraints)),
+                'duration': ts,
+                'error': error
+            }
+            for ts, constraints, error in results
+        ]
+        errored = [query_info for query_info in query_times if query_info['error']]
+        errors = {str(query_info['error']): [] for query_info in errored}
+        for error in errored:
+            errors[str(error['error'])].append(error['sql'])
+        slow = [
+            query_info for query_info in query_times
+            if not query_info['error'] and query_info['duration'] > (max_query_time / 2.0)
+        ]
+        fast = [
+            query_info for query_info in query_times
+            if not query_info['error'] and query_info['duration'] <= (max_query_time / 2.0)
+        ]
+        print(f"-- {len(fast)} queries were fast")
+        slow.sort(key=lambda query_info: query_info['duration'], reverse=True)
+        print(f"-- Failing queries:")
+        for error in errors:
+            print(f"-- Failure: \"{error}\"")
+            for failing_query in errors[error]:
+                print(f"{textwrap.dedent(failing_query)};\n")
+            print()
+        print(f"-- Slow queries:")
+        for slow_query in slow:
+            print(f"-- Query took {slow_query['duration']}\n{textwrap.dedent(slow_query['sql'])};\n")
     finally:
         query_executor.shutdown()
 
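The rewritten report collects every result into `query_times`, then partitions it three ways: `errored` (queries that raised), `slow` (error-free but over half of `max_query_time`), and `fast` (the rest). Using half the budget as the cutoff surfaces queries approaching the limit before they actually exceed it. A reduced sketch of the partitioning, with fabricated sample durations:

    max_query_time = 0.25  # seconds, matching the new --max_time default

    # hypothetical results already reshaped into the dicts used above
    query_times = [
        {'sql': 'SELECT 1', 'duration': 0.01, 'error': None},
        {'sql': 'SELECT 2', 'duration': 0.20, 'error': None},
        {'sql': 'SELECT 3', 'duration': 0.05, 'error': 'no such column'},
    ]

    errored = [q for q in query_times if q['error']]
    slow = [q for q in query_times
            if not q['error'] and q['duration'] > max_query_time / 2.0]
    fast = [q for q in query_times
            if not q['error'] and q['duration'] <= max_query_time / 2.0]

    print(len(fast), len(slow), len(errored))  # 1 1 1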
@@ -139,7 +170,7 @@ async def main(db_path, max_query_time):
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
     parser.add_argument('--db_path', dest='db_path', default=os.path.expanduser('~/claims.db'), type=str)
-    parser.add_argument('--max_time', dest='max_time', default=0.0, type=float)
+    parser.add_argument('--max_time', dest='max_time', default=0.25, type=float)
     args = parser.parse_args()
     db_path = args.db_path
     max_query_time = args.max_time
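With the new default, `--max_time 0.25` means any error-free query slower than 125 ms lands in the slow report, and both flags are optional thanks to their defaults. A typical invocation (the script name comes from the commit title; the path shown is just the default):

    python claim_search_performance.py --db_path ~/claims.db --max_time 0.25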