# lbry-sdk/scripts/time_to_first_byte.py
import os
import json
import argparse
import asyncio
import time

import aiohttp
from aiohttp import ClientConnectorError

from lbrynet import __version__
from lbrynet.blob.blob_file import MAX_BLOB_SIZE
from lbrynet.conf import Config
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.extras.daemon.client import daemon_rpc
from lbrynet.extras import system_info


def extract_uris(response):
    # Flatten the per-category URI lists in the homepage response into one list.
    uris = list()
    for key in response:
        for value in response[key]:
            uris.append(value)
    return uris
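
# Illustrative (hypothetical) homepage payload shape consumed by extract_uris(),
# keyed by category name:
#     {"featured": ["one#<claim_id>", "two#<claim_id>"], "community": [...]}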


async def get_frontpage_uris():
    session = aiohttp.ClientSession()
    try:
        response = await session.get("https://api.lbry.io/file/list_homepage", timeout=10.0)
        if response.status != 200:
            print("API returned a non-200 code!")
            return
        body = await response.json()
        return extract_uris(body['data']['Uris'])
    finally:
        # The session is closed exactly once, here, on every code path.
        await session.close()
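
# The homepage endpoint is expected to reply with
#     {"data": {"Uris": {<category>: [<uri>, ...]}}}
# (shape inferred from the parsing above); any other payload raises a KeyError.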


async def report_to_slack(output, webhook):
    payload = {
        "text": f"lbrynet {__version__} ({system_info.get_platform()['platform']}) time to first byte:\n{output}"
    }
    # Slack incoming webhooks accept a JSON document with a "text" field.
    async with aiohttp.request('post', webhook, data=json.dumps(payload)):
        pass


def confidence(times, z, plus_err=True):
    # Upper (or lower) bound of the confidence interval around the sample mean:
    # mean +/- z * (population stddev / sqrt(n)).
    mean = sum(times) / len(times)
    standard_dev = (sum(((t - mean) ** 2.0 for t in times)) / len(times)) ** 0.5
    err = (z * standard_dev) / (len(times) ** 0.5)
    return f"{round((mean + err) if plus_err else (mean - err), 3)}"


def variance(times):
    # Sample variance with Bessel's correction (divides by n - 1).
    mean = sum(times) / len(times)
    return round(sum(((i - mean) ** 2.0 for i in times)) / (len(times) - 1), 3)
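
# The z values passed in below (1.984 and 2.626) are the two-sided Student-t
# critical values for roughly 100 samples at 95% and 99% confidence.
# Worked example (illustrative): times = [1.0, 2.0, 3.0] has mean 2.0 and
# population stddev ~0.816, so confidence(times, 1.984) evaluates to
# round(2.0 + 1.984 * 0.816 / 3 ** 0.5, 3) == 2.935.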


async def wait_for_done(conf, uri):
    # Poll `file_list` once a second; a stream counts as hung once
    # blobs_completed has failed to advance for more than ten polls.
    name = uri.split("#")[0]
    last_complete = 0
    hang_count = 0
    while True:
        files = await daemon_rpc(conf, "file_list", claim_name=name)
        file = files[0]
        if file['status'] in ['finished', 'stopped']:
            return True, file['blobs_completed'], file['blobs_in_stream']
        if last_complete < int(file['blobs_completed']):
            hang_count = 0
            last_complete = int(file['blobs_completed'])
        else:
            hang_count += 1
        await asyncio.sleep(1.0)
        if hang_count > 10:
            return False, file['blobs_completed'], file['blobs_in_stream']
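
# Rough throughput estimate used in main(): a stream of N blobs is assumed to
# carry about N * (MAX_BLOB_SIZE - 1) bytes of payload, so MB/s is computed as
# bytes / elapsed_seconds / 1_000_000.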


async def main(uris=None, allow_fees=False):
    if not uris:
        uris = await get_frontpage_uris()
        if not uris:
            print("Could not fetch frontpage uris")
            return 1
    conf = Config()
    try:
        await daemon_rpc(conf, 'status')
    except (ClientConnectorError, ConnectionError):
        print("Could not connect to daemon")
        return 1
    print(f"Checking {len(uris)} uris from the front page")
    print("**********************************************")

    resolvable = []

    async def __resolve(name):
        resolved = await daemon_rpc(conf, 'resolve', urls=[name])
        if 'error' not in resolved.get(name, {}):
            if ("fee" not in resolved[name]['claim']['value']) or allow_fees:
                resolvable.append(name)
            else:
                print(f"{name} has a fee, skipping it")
        else:
            print(f"failed to resolve {name}: {resolved[name]['error']}")

    # Resolve all frontpage URIs concurrently, keeping only the free ones
    # (or everything when --allow_fees is set).
    await asyncio.gather(*(__resolve(name) for name in uris))
    print(f"attempting to download {len(resolvable)}/{len(uris)} frontpage streams")

    first_byte_times = []
    download_speeds = []
    download_successes = []
    failed_to_start = []
    download_failures = []

    # Delete any local copies first so every download is a cold start.
    for uri in resolvable:
        await daemon_rpc(conf, 'file_delete', delete_from_download_dir=True, claim_name=parse_lbry_uri(uri).name)

    for i, uri in enumerate(resolvable):
        start = time.time()
        try:
            await daemon_rpc(conf, 'get', uri=uri)
            first_byte = time.time()
            first_byte_times.append(first_byte - start)
            print(f"{i + 1}/{len(resolvable)} - {first_byte - start} {uri}")
            downloaded, amount_downloaded, blobs_in_stream = await wait_for_done(conf, uri)
            if downloaded:
                download_successes.append(uri)
            else:
                download_failures.append(uri)
            mbs = round((blobs_in_stream * (MAX_BLOB_SIZE - 1)) / (time.time() - start) / 1000000, 2)
            download_speeds.append(mbs)
            print(f"downloaded {amount_downloaded}/{blobs_in_stream} blobs for {uri} at "
                  f"{mbs}mb/s")
        except Exception:
            print(f"{i + 1}/{len(resolvable)} - failed to start {uri}")
            failed_to_start.append(uri)
        # await daemon_rpc(conf, 'file_delete', delete_from_download_dir=True, claim_name=parse_lbry_uri(uri).name)
        await asyncio.sleep(0.1)

    print("**********************************************")
    result = f"Started {len(first_byte_times)} of {len(resolvable)} attempted front page streams\n" \
             f"Worst first byte time: {round(max(first_byte_times), 2)}\n" \
             f"Best first byte time: {round(min(first_byte_times), 2)}\n" \
             f"95% confidence time-to-first-byte: {confidence(first_byte_times, 1.984)}s\n" \
             f"99% confidence time-to-first-byte: {confidence(first_byte_times, 2.626)}s\n" \
             f"Variance: {variance(first_byte_times)}\n" \
             f"Downloaded {len(download_successes)}/{len(resolvable)}\n" \
             f"Best stream download speed: {round(max(download_speeds), 2)}mb/s\n" \
             f"Worst stream download speed: {round(min(download_speeds), 2)}mb/s\n" \
             f"95% confidence download speed: {confidence(download_speeds, 1.984, False)}mb/s\n" \
             f"99% confidence download speed: {confidence(download_speeds, 2.626, False)}mb/s\n"
    if failed_to_start:
        result += "\nFailed to start:\n" + "\n".join(failed_to_start)
    if download_failures:
        result += "\nFailed to finish:\n" + "\n".join(download_failures)
    print(result)

    # Optionally post the summary to Slack, using the webhook URL from the
    # TTFB_SLACK_TOKEN environment variable.
    webhook = os.environ.get('TTFB_SLACK_TOKEN', None)
    if webhook:
        await report_to_slack(result, webhook)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # --data_dir, --wallet_dir and --download_directory are accepted but not
    # currently forwarded to main() or Config.
    parser.add_argument("--data_dir")
    parser.add_argument("--wallet_dir")
    parser.add_argument("--download_directory")
    parser.add_argument("--allow_fees", action='store_true')
    args = parser.parse_args()
    asyncio.run(main(allow_fees=args.allow_fees))
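
# Example invocation (assumes a reachable lbrynet daemon with its default
# configuration; flags other than --allow_fees are currently ignored):
#     python scripts/time_to_first_byte.py --allow_fees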