2019-01-22 23:45:13 +01:00
|
|
|
import os
|
|
|
|
import json
|
2018-12-12 07:43:17 +01:00
|
|
|
import argparse
|
2018-12-19 20:46:47 +01:00
|
|
|
import asyncio
|
|
|
|
import aiohttp
|
2018-12-11 19:04:32 +01:00
|
|
|
import time
|
|
|
|
|
2018-12-19 20:46:47 +01:00
|
|
|
from aiohttp import ClientConnectorError
|
2019-01-23 16:41:34 +01:00
|
|
|
from lbrynet import __version__
|
2019-02-09 02:14:10 +01:00
|
|
|
from lbrynet.blob.blob_file import MAX_BLOB_SIZE
|
2019-01-23 16:41:34 +01:00
|
|
|
from lbrynet.conf import Config
|
2019-01-11 21:01:56 +01:00
|
|
|
from lbrynet.schema.uri import parse_lbry_uri
|
2019-01-23 22:41:14 +01:00
|
|
|
from lbrynet.extras.daemon.client import daemon_rpc
|
2019-01-22 23:45:13 +01:00
|
|
|
from lbrynet.extras import system_info, cli
|
2018-12-11 19:04:32 +01:00
|
|
|
|
|
|
|
|
2018-12-19 20:46:47 +01:00
|
|
|
def extract_uris(response):
    """Flatten a mapping of category -> list of URIs into one flat list."""
    return [uri for category_uris in response.values() for uri in category_uris]
|
2018-12-11 19:04:32 +01:00
|
|
|
|
|
|
|
|
2018-12-19 20:46:47 +01:00
|
|
|
async def get_frontpage_uris():
    """Fetch the claim URIs currently featured on the lbry.io front page.

    Returns:
        list[str] of URIs, or None when the API responds with a non-200
        status.
    """
    # `async with` guarantees the session is closed on every exit path;
    # the original closed the session twice (once explicitly, once in a
    # `finally`) and leaked it if ClientSession() itself raised later.
    async with aiohttp.ClientSession() as session:
        response = await session.get("https://api.lbry.io/file/list_homepage", timeout=10.0)
        if response.status != 200:
            print("API returned non 200 code!!")
            return
        body = await response.json()
        return extract_uris(body['data']['Uris'])
|
2018-12-11 19:04:32 +01:00
|
|
|
|
|
|
|
|
2019-01-22 23:45:13 +01:00
|
|
|
async def report_to_slack(output, webhook):
    """POST the benchmark summary *output* to a Slack incoming-webhook URL."""
    text = (
        f"lbrynet {__version__} ({system_info.get_platform()['platform']}) "
        f"time to first byte:\n{output}"
    )
    async with aiohttp.request('post', webhook, data=json.dumps({"text": text})):
        pass  # response body is irrelevant; we only need the request sent
|
|
|
|
|
|
|
|
|
|
|
|
def confidence(times, z):
    """Upper bound of the z-confidence interval for the mean of *times*.

    Args:
        times: non-empty sequence of sample durations in seconds.
        z: z-score for the desired confidence level (e.g. 1.984 for 95%).

    Returns:
        str such as "2.935s": round(mean, 3) + round(margin-of-error, 3).
    """
    mean = sum(times) / len(times)
    # Population standard deviation.  Reuse the already-computed `mean`
    # instead of recomputing sum(times)/len(times) for every element.
    standard_dev = (sum((t - mean) ** 2.0 for t in times) / len(times)) ** 0.5
    err = (z * standard_dev) / (len(times) ** 0.5)
    return f"{round(mean, 3) + round(err, 3)}s"
|
|
|
|
|
|
|
|
|
|
|
|
def variance(times):
    """Sample variance (n-1 denominator) of *times*, rounded to 3 places."""
    mean = sum(times) / len(times)
    squared_deviations = [(t - mean) ** 2.0 for t in times]
    return round(sum(squared_deviations) / (len(times) - 1), 3)
|
|
|
|
|
|
|
|
|
2019-01-31 18:34:25 +01:00
|
|
|
async def wait_for_done(conf, uri):
    """Poll the daemon until the download for *uri* finishes or stalls.

    Polls `file_list` once per second.  The download counts as stalled when
    more than ten consecutive polls pass without a new blob completing.

    Returns:
        (success, blobs_completed, blobs_in_stream) where success is False
        on a stall.
    """
    claim_name = uri.split("#")[0]
    blobs_seen = 0
    stalled_polls = 0
    while True:
        file_status = (await daemon_rpc(conf, "file_list", claim_name=claim_name))[0]
        completed = file_status['blobs_completed']
        if file_status['status'] in ('finished', 'stopped'):
            return True, completed, file_status['blobs_in_stream']
        if int(completed) > blobs_seen:
            # Progress was made; reset the stall counter.
            blobs_seen = int(completed)
            stalled_polls = 0
        else:
            stalled_polls += 1
        await asyncio.sleep(1.0)
        if stalled_polls > 10:
            return False, completed, file_status['blobs_in_stream']
|
2019-01-22 23:45:13 +01:00
|
|
|
|
|
|
|
|
2019-01-31 18:34:25 +01:00
|
|
|
async def main(uris=None):
    """Benchmark time-to-first-byte for *uris* (front page list by default).

    Resolves each URI, deletes any local copy, re-downloads it through the
    daemon, and prints per-stream timings plus a statistical summary.

    Returns:
        1 on setup failure (no uris, daemon unreachable, nothing started);
        otherwise None.
    """
    if not uris:
        uris = await get_frontpage_uris()
    # get_frontpage_uris() returns None when the API call fails; bail out
    # instead of crashing on len(None) below.
    if not uris:
        print("Could not fetch the front page uris")
        return 1
    conf = Config()
    try:
        await daemon_rpc(conf, 'status')
    except (ClientConnectorError, ConnectionError):
        print("Could not connect to daemon")
        return 1
    print(f"Checking {len(uris)} uris from the front page")
    print("**********************************************")

    # Keep only the uris that resolve without an error.
    resolvable = []
    for name in uris:
        resolved = await daemon_rpc(conf, 'resolve', name)
        if 'error' not in resolved.get(name, {}):
            resolvable.append(name)

    print(f"{len(resolvable)}/{len(uris)} are resolvable")

    first_byte_times = []
    downloaded_times = []
    failures = []
    download_failures = []

    # Delete any previously downloaded copies so every 'get' starts cold.
    for uri in resolvable:
        await daemon_rpc(conf, 'file_delete', delete_from_download_dir=True, claim_name=parse_lbry_uri(uri).name)

    for i, uri in enumerate(resolvable):
        start = time.time()
        try:
            await daemon_rpc(conf, 'get', uri)
            first_byte = time.time()
            first_byte_times.append(first_byte - start)
            print(f"{i + 1}/{len(resolvable)} - {first_byte - start} {uri}")
            downloaded, amount_downloaded, blobs_in_stream = await wait_for_done(conf, uri)
            if downloaded:
                # NOTE(review): `downloaded` is a bool here, so this divides
                # by 1 -- presumably a blob count was intended; confirm.
                downloaded_times.append((time.time() - start) / downloaded)
            else:
                download_failures.append(uri)
            print(f"downloaded {amount_downloaded}/{blobs_in_stream} blobs for {uri} at "
                  f"{round((blobs_in_stream * (MAX_BLOB_SIZE - 1)) / (time.time() - start) / 1000000, 2)}mb/s\n")
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # asyncio.CancelledError; narrowed to Exception.  The denominator is
        # now len(resolvable) to match the success message above.
        except Exception:
            print(f"{i + 1}/{len(resolvable)} - failed to start {uri}")
            failures.append(uri)
            # NOTE(review): this aborts the whole run (and skips the summary
            # below) on the first failed start -- confirm that is intended.
            return
        # await daemon_rpc(conf, 'file_delete', delete_from_download_dir=True, claim_name=parse_lbry_uri(uri).name)
        await asyncio.sleep(0.1)

    print("**********************************************")
    if not first_byte_times:
        # Nothing started; max()/min() below would raise on empty lists.
        print("No streams were started")
        return 1
    result = f"Tried to start downloading {len(resolvable)} streams from the front page\n" \
             f"Worst first byte time: {round(max(first_byte_times), 2)}\n" \
             f"Best first byte time: {round(min(first_byte_times), 2)}\n" \
             f"95% confidence time-to-first-byte: {confidence(first_byte_times, 1.984)}\n" \
             f"99% confidence time-to-first-byte: {confidence(first_byte_times, 2.626)}\n" \
             f"Variance: {variance(first_byte_times)}\n" \
             f"Started {len(first_byte_times)}/{len(resolvable)} streams"
    if failures:
        nt = '\n\t'
        result += f"\nFailures:\n\t{nt.join([f for f in failures])}"
    print(result)

    # webhook = os.environ.get('TTFB_SLACK_TOKEN', None)
    # if webhook:
    #     await report_to_slack(result, webhook)
|
2018-12-11 19:04:32 +01:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # NOTE(review): these parsed arguments are currently unused -- main()
    # is invoked without them; confirm whether they should be wired in.
    for option in ("--data_dir", "--wallet_dir", "--download_directory"):
        parser.add_argument(option)
    args = parser.parse_args()
    asyncio.run(main())
|