forked from LBRYCommunity/lbry-sdk
sqlite_perf_test uses lbry.wallet.server.db.reader
This commit is contained in:
parent d083910255
commit 6d7aa93157
1 changed file with 28 additions and 55 deletions
@@ -1,73 +1,46 @@
-import uvloop, asyncio, sqlite3, time, sys
+import uvloop, asyncio, time, sys
 from concurrent.futures import ProcessPoolExecutor
-from contextvars import ContextVar
+from lbry.wallet.server.db import reader
 
 
 db_path = '/tmp/wallet-server/claims.db'
-db = ContextVar('db')
-
-
-def init():
-    conn = sqlite3.connect(db_path)
-    db.set(conn)
-
-
-def reader():
-    conn = db.get()
-    start = time.time()
-    conn.execute("""
-        SELECT
-            claimtrie.claim_hash as is_controlling,
-            claimtrie.last_take_over_height,
-            claim.claim_hash, claim.txo_hash,
-            claim.claims_in_channel,
-            claim.height, claim.creation_height,
-            claim.activation_height, claim.expiration_height,
-            claim.effective_amount, claim.support_amount,
-            claim.trending_group, claim.trending_mixed,
-            claim.trending_local, claim.trending_global,
-            claim.short_url, claim.canonical_url,
-            claim.channel_hash, channel.txo_hash AS channel_txo_hash,
-            channel.height AS channel_height, claim.signature_valid
-        FROM claim
-            LEFT JOIN claimtrie USING (claim_hash)
-            LEFT JOIN claim as channel ON (claim.channel_hash=channel.claim_hash)
-        WHERE
-            EXISTS(
-                SELECT 1 FROM tag WHERE claim.claim_hash=tag.claim_hash
-                AND tag IN ('alexandria ocasio-cortez', 'Alien', 'alt news', 'art', 'audio',
-                'automotive', 'beliefs', 'blockchain', 'dog grooming', 'economics', 'food',
-                'learning', 'mature', 'nature', 'news', 'physics', 'science', 'technology')
-            )
-            AND NOT EXISTS(
-                SELECT 1 FROM tag WHERE claim.claim_hash=tag.claim_hash AND tag IN ('nsfw', 'xxx', 'mature')
-            )
-        ORDER BY claim.height DESC, claim.normalized ASC
-        LIMIT 20 OFFSET 100
-    """).fetchall()
-    elapsed = time.time() - start
-    return elapsed
 
 
 async def run_times(executor, iterations, show=True):
-    start = time.time()
-    timings = await asyncio.gather(*(
-        asyncio.get_running_loop().run_in_executor(executor, reader) for _ in range(iterations)
-    ))
-    total = time.time() - start
+    start = time.perf_counter()
+    timings = await asyncio.gather(*(asyncio.get_running_loop().run_in_executor(
+        executor, reader.search_to_bytes, {
+            'no_totals': True,
+            'offset': 0,
+            'limit': 20,
+            'fee_amount': '<1',
+            #'all_tags': ['funny'],
+            'any_tags': [
+                'crypto',
+                'outdoors',
+                'cars',
+                'automotive'
+            ],
+            'not_tags': [
+                'nsfw', 'xxx', 'mature'
+            ]
+        }
+    ) for _ in range(iterations)))
+    timings = [r[1]['execute_query']['total'] for r in timings]
+    total = int((time.perf_counter() - start) * 100)
     if show:
         avg = sum(timings)/len(timings)
-        print(f"{iterations:4}: {total:.5f}ms total concurrent, {len(timings)*avg:.5f}ms total sequential (avg*runs)")
-        print(f" {total/len(timings):.5f}ms/query concurrent (total/runs)")
-        print(f" {avg:.5f}ms/query actual average (sum(queries)/runs)")
+        print(f"{iterations:4}: {total}ms total concurrent, {len(timings)*avg*1000:.3f}s total sequential (avg*runs)")
+        print(f" {total/len(timings):.1f}ms/query concurrent (total/runs)")
+        print(f" {avg:.1f}ms/query actual average (sum(queries)/runs)")
         sys.stdout.write(' sample:')
         for i, t in zip(range(10), timings[::-1]):
-            sys.stdout.write(f' {t:.5f}ms')
+            sys.stdout.write(f' {t}ms')
         print(' ...\n' if len(timings) > 10 else '\n')
 
 
 async def main():
-    executor = ProcessPoolExecutor(4, initializer=init)
+    executor = ProcessPoolExecutor(4, initializer=reader.initializer, initargs=(db_path, 'mainnet', True))
     await run_times(executor, 4, show=False)
     await run_times(executor, 1)
     await run_times(executor, 2**3)
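The change swaps the hand-rolled sqlite3/ContextVar setup for the wallet server's own reader module: each worker process is primed through reader.initializer(db_path, 'mainnet', True), every benchmark iteration calls reader.search_to_bytes with a constraint dict, and the per-query time is read from the metrics in the second element of each result. Below is a minimal standalone sketch of that same pattern, not part of the commit. It assumes the initializer and search_to_bytes signatures exactly as they appear in the diff, that the third initializer argument enables the timing metrics the benchmark reads from result[1]['execute_query']['total'], and that a claims database exists at the hard-coded path; the asyncio driver at the bottom is purely illustrative.

import asyncio
from concurrent.futures import ProcessPoolExecutor

from lbry.wallet.server.db import reader

DB_PATH = '/tmp/wallet-server/claims.db'  # same path the benchmark hard-codes


async def single_search(executor):
    # Same call shape as the benchmark: the constraint dict is handed to
    # reader.search_to_bytes inside a worker process that reader.initializer
    # has already set up with its own database connection.
    result = await asyncio.get_running_loop().run_in_executor(
        executor, reader.search_to_bytes, {
            'no_totals': True,
            'offset': 0,
            'limit': 20,
            'any_tags': ['crypto', 'automotive'],
            'not_tags': ['nsfw', 'xxx', 'mature'],
        })
    # The benchmark takes its timings from the second element of each result;
    # this assumes the metrics flag (third initargs value) is turned on.
    return result[1]['execute_query']['total']


async def main():
    executor = ProcessPoolExecutor(
        2, initializer=reader.initializer, initargs=(DB_PATH, 'mainnet', True))
    try:
        elapsed = await single_search(executor)
        # The benchmark formats these values as milliseconds.
        print(f'execute_query took {elapsed}ms')
    finally:
        executor.shutdown()


if __name__ == '__main__':
    asyncio.run(main())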