added better stats to sqlite_perf_test.py

parent 82fd3c632e
commit bbc056eef0

1 changed file with 12 additions and 11 deletions
sqlite_perf_test.py

@@ -1,6 +1,7 @@
 import uvloop, asyncio, time, sys, logging
 from concurrent.futures import ProcessPoolExecutor
 from lbry.wallet.server.db import reader
+from lbry.wallet.server.metrics import calculate_avg_percentiles
 
 
 db_path = '../../../lbryconf/wallet-server/claims.db'
@@ -16,16 +17,14 @@ async def run_times(executor, iterations, show=True):
             'no_totals': True,
             'offset': 0,
             'limit': 20,
-            'fee_amount': '<1',
-            #'all_tags': ['funny'],
             'any_tags': [
-                'crypto',
-                'outdoors',
-                'cars',
-                'automotive'
+                'ufos', 'city fix'
             ],
             'not_tags': [
-                'nsfw', 'xxx', 'mature'
+                'porn', 'mature', 'xxx', 'nsfw'
+            ],
+            'order_by': [
+                'release_time'
             ]
         }
     ) for _ in range(iterations)))
@@ -36,6 +35,8 @@ async def run_times(executor, iterations, show=True):
         print(f"{iterations:4}: {total}ms total concurrent, {len(timings)*avg*1000:.3f}s total sequential (avg*runs)")
         print(f"  {total/len(timings):.1f}ms/query concurrent (total/runs)")
         print(f"  {avg:.1f}ms/query actual average (sum(queries)/runs)")
+        stats = calculate_avg_percentiles(timings)
+        print(f"  min: {stats[1]}, 5%: {stats[2]}, 25%: {stats[3]}, 50%: {stats[4]}, 75%: {stats[5]}, 95%: {stats[6]}, max: {stats[7]}")
         sys.stdout.write(' sample:')
         for i, t in zip(range(10), timings[::-1]):
             sys.stdout.write(f' {t}ms')
@ -44,14 +45,14 @@ async def run_times(executor, iterations, show=True):
|
||||||
|
|
||||||
async def main():
|
async def main():
|
||||||
executor = ProcessPoolExecutor(
|
executor = ProcessPoolExecutor(
|
||||||
4, initializer=reader.initializer, initargs=(log, db_path, 'mainnet', default_query_timout, True)
|
4, initializer=reader.initializer, initargs=(log, db_path, 'mainnet', 1.0, True)
|
||||||
)
|
)
|
||||||
await run_times(executor, 4, show=False)
|
#await run_times(executor, 4, show=False)
|
||||||
await run_times(executor, 1)
|
#await run_times(executor, 1)
|
||||||
await run_times(executor, 2**3)
|
await run_times(executor, 2**3)
|
||||||
await run_times(executor, 2**5)
|
await run_times(executor, 2**5)
|
||||||
await run_times(executor, 2**7)
|
await run_times(executor, 2**7)
|
||||||
await run_times(executor, 2**9)
|
#await run_times(executor, 2**9)
|
||||||
#await run_times(executor, 2**11)
|
#await run_times(executor, 2**11)
|
||||||
#await run_times(executor, 2**13)
|
#await run_times(executor, 2**13)
|
||||||
executor.shutdown(True)
|
executor.shutdown(True)
|
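
Note on the new stats line: calculate_avg_percentiles is imported from lbry.wallet.server.metrics, and the way its result is indexed above suggests it returns a tuple whose slots 1 through 7 hold the min, 5th, 25th, 50th, 75th and 95th percentiles and the max of the per-query timings, with slot 0 presumably the average. A minimal sketch of a helper with that assumed layout, for illustration only (the real implementation in lbry.wallet.server.metrics may differ):

# Illustrative sketch only -- the tuple layout (avg, min, 5%, 25%, 50%, 75%, 95%, max)
# is inferred from how `stats` is indexed in the print statement in the diff above.
def percentile_stats(timings):
    values = sorted(timings)
    n = len(values)

    def pct(p):
        # nearest-rank percentile over the sorted timings
        return values[min(n - 1, int(n * p / 100))]

    return (
        sum(values) / n,           # slot 0: average
        values[0],                 # slot 1: min
        pct(5), pct(25), pct(50),  # slots 2-4: 5th, 25th, 50th percentiles
        pct(75), pct(95),          # slots 5-6: 75th, 95th percentiles
        values[-1],                # slot 7: max
    )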