cleanup

parent b9af999739
commit e74a5e8ef3

1 changed file with 48 additions and 68 deletions
@@ -1,18 +1,12 @@
 import argparse
 import asyncio
 import aiohttp
-import keyring
 import time
 
 from aiohttp import ClientConnectorError
 from lbrynet import conf
-from lbrynet.extras.daemon.auth.client import UnAuthAPIClient
-
-
-def kill_loop():
-    loop = asyncio.get_event_loop()
-    loop.stop()
-    # loop.close()
+from lbrynet.schema.uri import parse_lbry_uri
+from lbrynet.extras.daemon.DaemonConsole import LBRYAPIClient
 
 
 def extract_uris(response):
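Two removals in this first hunk travel together: import keyring goes away because the homepage request no longer sends an auth_token (second hunk below), and kill_loop() is dropped because the entry point no longer drives the event loop by hand. A minimal sketch of that lifecycle change, using only the calls that appear in this diff:

    # Before: the script managed the loop itself, so early exits needed
    # kill_loop() to stop it explicitly.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())

    # After: asyncio.run() (Python 3.7+) creates, runs, and closes the
    # loop on its own, so no manual stop is needed.
    asyncio.run(main())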
@@ -25,88 +19,74 @@ def extract_uris(response):
 
 
 async def get_frontpage_uris():
-    kr = keyring.get_keyring()
-    c = kr.get_preferred_collection()
-    lbry_keyring = None
-    for col in c.get_all_items():
-        if col.get_label() == "LBRY/auth_token":
-            lbry_keyring = col
-            break
-
-    if lbry_keyring is None:
-        print("An auth token is needed to fetch the front page uris")
-        print("To generate the auth token, run the LBRY app at least once")
-        print("Then run the script again")
-
-    lbry_keyring = lbry_keyring.get_secret().decode("ascii")
-
     session = aiohttp.ClientSession()
-    response = await session.get("https://api.lbry.io/file/list_homepage?auth_token={}".format(lbry_keyring))
-    if response.status != 200:
-        print("API returned non 200 code!!")
-        await session.close()
-        kill_loop()
-
-    body = await response.json()
-    await session.close()
-    uris = extract_uris(body['data']['Uris'])
-    return uris
+    try:
+        response = await session.get("https://api.lbry.io/file/list_homepage", timeout=10.0)
+        if response.status != 200:
+            print("API returned non 200 code!!")
+            return
+        body = await response.json()
+        await session.close()
+        uris = extract_uris(body['data']['Uris'])
+        return uris
+    finally:
+        await session.close()
 
 
 async def main():
     uris = await get_frontpage_uris()
-    api = await UnAuthAPIClient.from_url(conf.settings.get_api_connection_string())
+    print("got %i uris" % len(uris))
+    api = await LBRYAPIClient.get_client()
 
     try:
         await api.status()
     except (ClientConnectorError, ConnectionError):
         await api.session.close()
-        kill_loop()
         print("Could not connect to daemon. Are you sure it's running?")
-        return 1
+        return
 
-    results = dict()
-
-    # uris = ["what", "holi", "aweqwfq"]
-    _sum = 0
-    downloaded = len(uris)
+    first_byte_times = []
 
     for uri in uris:
-        start = time.time()
-        resp = await api.call("get", {"uri": uri})
-        end = time.time()
-
-        await api.call("file_delete", {"delete_from_download_dir": True,
-                                       "delete_all": True,
-                                       "claim_name": uri
-                                       })
-
-        time_taken = end - start
-        results[uri] = time_taken
-        _sum += time_taken
-
-        if resp.get('error'):
-            results[uri] = "Could not download"
-            downloaded -= 1
-            _sum -= time_taken
-
-        print(results[uri], uri)
-
-    avg = _sum / downloaded
+        await api.call(
+            "file_delete", {
+                "delete_from_download_dir": True,
+                "delete_all": True,
+                "claim_name": parse_lbry_uri(uri).name
+            }
+        )
+
+    for i, uri in enumerate(uris):
+        start = time.time()
+        try:
+            await api.call("get", {"uri": uri})
+            first_byte = time.time()
+            first_byte_times.append(first_byte - start)
+            print(f"{i + 1}/{len(uris)} - {first_byte - start} {uri}")
+        except:
+            print(f"{i + 1}/{len(uris)} - timed out in {time.time() - start} {uri}")
+            await api.call(
+                "file_delete", {
+                    "delete_from_download_dir": True,
+                    "claim_name": parse_lbry_uri(uri).name
+                }
+            )
+
+    avg = sum(first_byte_times) / len(first_byte_times)
 
     print()
-    print("Average time taken:", avg)
-    print("Downloaded {} Not Downloaded {}".format(downloaded, len(uris) - downloaded))
+    print(f"Average time to first byte: {avg} ({len(first_byte_times)} streams)")
+    print(f"Started {len(first_byte_times)} Timed out {len(uris) - len(first_byte_times)}")
 
     await api.session.close()
 
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
-    parser.add_argument("--data_dir")
-    parser.add_argument("--wallet_dir")
-    parser.add_argument("--download_directory")
+    parser.add_argument("--data_dir", default=None)
+    parser.add_argument("--wallet_dir", default=None)
+    parser.add_argument("--download_directory", default=None)
     args = parser.parse_args()
 
-    conf.initialize_settings(data_dir=args.data_dir, wallet_dir=args.wallet_dir, download_dir=args.download_directory)
-    loop = asyncio.get_event_loop()
-    loop.run_until_complete(main())
+    conf.initialize_settings(
+        data_dir=args.data_dir, wallet_dir=args.wallet_dir, download_dir=args.download_directory
+    )
+    asyncio.run(main())
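Worth flagging in the rewritten get_frontpage_uris(): the success path closes the session in the body and then again in finally (redundant, though aiohttp treats a second close() as a no-op), and a non-200 response returns None, which main() immediately passes to len(). A minimal sketch, not the committed code, of an equivalent that avoids both, assuming only aiohttp and the extract_uris() helper from this file:

    async def get_frontpage_uris():
        # async with closes the session exactly once, on every exit path
        async with aiohttp.ClientSession() as session:
            response = await session.get("https://api.lbry.io/file/list_homepage", timeout=10.0)
            if response.status != 200:
                print("API returned non 200 code!!")
                return []  # empty list keeps len(uris) in main() from raising
            body = await response.json()
            return extract_uris(body['data']['Uris'])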
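Two smaller hazards remain in the measurement loop: the bare except: catches everything, including KeyboardInterrupt, and the final average divides by len(first_byte_times), which is zero when every get call fails. A sketch of the same loop with both tightened; the api.call() usage is exactly as in the diff, only the exception clause and the average differ:

    for i, uri in enumerate(uris):
        start = time.time()
        try:
            await api.call("get", {"uri": uri})
            first_byte = time.time()
            first_byte_times.append(first_byte - start)
            print(f"{i + 1}/{len(uris)} - {first_byte - start} {uri}")
        except Exception:
            # narrower than a bare except: Ctrl-C still propagates
            print(f"{i + 1}/{len(uris)} - timed out in {time.time() - start} {uri}")

    # report 0.0 instead of raising ZeroDivisionError on an all-failure run
    avg = sum(first_byte_times) / len(first_byte_times) if first_byte_times else 0.0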