import itertools  # stdlib; flattens the combination loops in get_args


def get_args(limit=20):
    """Build every claim-search constraint dict the benchmark will run.

    Emits the cartesian product of: fee filter on/off, claim type
    (None / 'stream' / 'channel'), result offset, any_tags combinations,
    not_tags combinations and sort order.  ``no_totals`` is always True —
    the original code iterated the degenerate single-element list ``[True]``.

    Args:
        limit: value stored under the ``'limit'`` key of every generated dict.

    Returns:
        list of kwargs dicts, each suitable for ``search_to_bytes``.
    """
    # NOTE(review): DEFAULT_ANY_TAGS, COMMON_AND_RARE, RARE_ANY_TAGS,
    # COMMON_AND_RARE2, CITY_FIX, MATURE_TAGS and ORDER_BY are module-level
    # constants defined elsewhere in this file (elided by the diff context).
    any_tags_combinations = [DEFAULT_ANY_TAGS, COMMON_AND_RARE, RARE_ANY_TAGS, COMMON_AND_RARE2, CITY_FIX, []]
    not_tags_combinations = [MATURE_TAGS, []]
    args = []
    # itertools.product replaces seven levels of nested for-loops; the
    # rightmost iterable varies fastest, so the emission order matches the
    # original nesting (no_fee, claim_type, offset, any_tags, not_tags,
    # order_by) exactly.
    combos = itertools.product(
        [False, True],                # no_fee
        [None, 'stream', 'channel'],  # claim_type
        [0, 100],                     # offset
        any_tags_combinations,
        not_tags_combinations,
        ORDER_BY,
    )
    for no_fee, claim_type, offset, any_tags, not_tags, order_by in combos:
        kw = {
            'order_by': order_by,
            'offset': offset,
            'limit': limit,
            'no_totals': True
        }
        if not_tags:
            kw['not_tags'] = not_tags
        if any_tags:
            kw['any_tags'] = any_tags
        if claim_type:
            kw['claim_type'] = claim_type
        if no_fee:
            kw['fee_amount'] = 0
        args.append(kw)
    print(f"-- Trying {len(args)} argument combinations")
    return args


def _search(kwargs):
    """Run one blocking claim search, timing it and capturing any error.

    Intended to run inside an executor process/thread; never raises for a
    failing query so the benchmark sweep keeps going.

    Returns:
        tuple ``(duration_seconds, kwargs, error)`` — ``error`` is ``None``
        on success, otherwise ``str(exception)``.
    """
    start = time.perf_counter()  # monotonic, high-resolution timer
    error = None
    try:
        search_to_bytes(kwargs)
    except Exception as err:  # deliberately broad: record the failure, keep sweeping
        error = str(err)
    return time.perf_counter() - start, kwargs, error


async def search(executor, kwargs):
    """Run ``_search(kwargs)`` in *executor* without blocking the event loop.

    Exceptions from the query itself are already captured inside ``_search``
    and returned in its result tuple, so no try/except is needed here.
    """
    return await asyncio.get_running_loop().run_in_executor(
        executor, _search, kwargs
    )
claim.effective_amount, claim.support_amount, + claim.trending_group, claim.trending_mixed, + claim.trending_local, claim.trending_global, + claim.short_url, claim.canonical_url, + claim.channel_hash, channel.txo_hash AS channel_txo_hash, + channel.height AS channel_height, claim.signature_valid + """, **constraints)), + 'duration': ts, + 'error': error + } + for ts, constraints, error in results + ] + errored = [query_info for query_info in query_times if query_info['error']] + errors = {str(query_info['error']): [] for query_info in errored} + for error in errored: + errors[str(error['error'])].append(error['sql']) + slow = [ + query_info for query_info in query_times + if not query_info['error'] and query_info['duration'] > (max_query_time / 2.0) + ] + fast = [ + query_info for query_info in query_times + if not query_info['error'] and query_info['duration'] <= (max_query_time / 2.0) + ] + print(f"-- {len(fast)} queries were fast") + slow.sort(key=lambda query_info: query_info['duration'], reverse=True) + print(f"-- Failing queries:") + for error in errors: + print(f"-- Failure: \"{error}\"") + for failing_query in errors[error]: + print(f"{textwrap.dedent(failing_query)};\n") + print() + print(f"-- Slow queries:") + for slow_query in slow: + print(f"-- Query took {slow_query['duration']}\n{textwrap.dedent(slow_query['sql'])};\n") finally: query_executor.shutdown() @@ -139,7 +170,7 @@ async def main(db_path, max_query_time): if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('--db_path', dest='db_path', default=os.path.expanduser('~/claims.db'), type=str) - parser.add_argument('--max_time', dest='max_time', default=0.0, type=float) + parser.add_argument('--max_time', dest='max_time', default=0.25, type=float) args = parser.parse_args() db_path = args.db_path max_query_time = args.max_time