lbry-sdk/scripts/dht_node.py
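"""Start a single LBRY DHT node that can act as a seed/bootstrap node or simply
contribute to the network, optionally exposing Prometheus metrics and CSV dumps
of its known peers and stored blob hashes."""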

import asyncio
import argparse
import logging
import csv
import os.path
from io import StringIO
from typing import Optional
from aiohttp import web
from prometheus_client import generate_latest as prom_generate_latest

from lbry.dht.constants import generate_id
from lbry.dht.node import Node
from lbry.dht.peer import PeerManager
from lbry.extras.daemon.storage import SQLiteStorage
from lbry.conf import Config

logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)-4s %(name)s:%(lineno)d: %(message)s")
log = logging.getLogger(__name__)


class SimpleMetrics:
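    """Tiny aiohttp server that exposes Prometheus metrics and, optionally,
    CSV dumps of the DHT node's routing table peers and stored blob hashes."""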

    def __init__(self, port, node):
        self.prometheus_port = port
        self.dht_node: Node = node

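    # Serve the collected metrics in the Prometheus text exposition format.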
    async def handle_metrics_get_request(self, _):
        try:
            return web.Response(
                text=prom_generate_latest().decode(),
                content_type='text/plain; version=0.0.4'
            )
        except Exception:
            log.exception('could not generate prometheus data')
            raise

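    # Dump the node's current routing table as CSV, one row per known peer.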
    async def handle_peers_csv(self, _):
        out = StringIO()
        writer = csv.DictWriter(out, fieldnames=["ip", "port", "dht_id"])
        writer.writeheader()
        for peer in self.dht_node.protocol.routing_table.get_peers():
            writer.writerow({"ip": peer.address, "port": peer.udp_port, "dht_id": peer.node_id.hex()})
        return web.Response(text=out.getvalue(), content_type='text/csv')

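    # Dump the blob hashes currently tracked in the node's data store as CSV.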
    async def handle_blobs_csv(self, _):
        out = StringIO()
        writer = csv.DictWriter(out, fieldnames=["blob_hash"])
        writer.writeheader()
        for blob in self.dht_node.protocol.data_store.keys():
            writer.writerow({"blob_hash": blob.hex()})
        return web.Response(text=out.getvalue(), content_type='text/csv')

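    # Start the metrics web server; the CSV endpoints are only registered when a DHT node was provided.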
    async def start(self):
        prom_app = web.Application()
        prom_app.router.add_get('/metrics', self.handle_metrics_get_request)
        if self.dht_node:
            prom_app.router.add_get('/peers.csv', self.handle_peers_csv)
            prom_app.router.add_get('/blobs.csv', self.handle_blobs_csv)
        metrics_runner = web.AppRunner(prom_app)
        await metrics_runner.setup()
        prom_site = web.TCPSite(metrics_runner, "0.0.0.0", self.prometheus_port)
        await prom_site.start()


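# Set up persistence and node identity, start the DHT node (and optionally the
# metrics server), then loop forever logging routing table and data store sizes.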
async def main(host: str, port: int, db_file_path: str, bootstrap_node: Optional[str], prometheus_port: int, export: bool):
    loop = asyncio.get_event_loop()
    conf = Config()
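    # Persist the node id in a file derived from the DB path so the node keeps its identity across restarts.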
    if not db_file_path.startswith(':memory:'):
        node_id_file_path = db_file_path + 'node_id'
        if os.path.exists(node_id_file_path):
            with open(node_id_file_path, 'rb') as node_id_file:
                node_id = node_id_file.read()
        else:
            with open(node_id_file_path, 'wb') as node_id_file:
                node_id = generate_id()
                node_id_file.write(node_id)
    else:
        # In-memory DB: nothing to persist, so use a fresh ephemeral node id.
        node_id = generate_id()
    storage = SQLiteStorage(conf, db_file_path, loop, loop.time)
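    # Bootstrap from the explicitly given node if any, otherwise from the config's known DHT nodes.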
    if bootstrap_node:
        nodes = bootstrap_node.split(':')
        nodes = [(nodes[0], int(nodes[1]))]
    else:
        nodes = conf.known_dht_nodes
    await storage.open()
    node = Node(
        loop, PeerManager(loop), node_id, port, port, 3333, None,
        storage=storage, is_bootstrap_node=True
    )
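    # Hand the node to the metrics server only when CSV export was enabled on the command line.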
    if prometheus_port > 0:
        metrics = SimpleMetrics(prometheus_port, node if export else None)
        await metrics.start()
    node.start(host, nodes)
    log.info("Peer with id %s started", node_id.hex())
    while True:
        await asyncio.sleep(10)
        log.info("Known peers: %d. Storing contact information for %d blobs from %d peers.",
                 len(node.protocol.routing_table.get_peers()), len(node.protocol.data_store),
                 len(node.protocol.data_store.get_storing_contacts()))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="Starts a single DHT node, which can then be used as a seed node or just a contributing node.")
    parser.add_argument("--host", default='0.0.0.0', type=str, help="Host to listen for requests. Default: 0.0.0.0")
    parser.add_argument("--port", default=4444, type=int, help="Port to listen for requests. Default: 4444")
    parser.add_argument("--db_file", default='/tmp/dht.db', type=str, help="DB file to save peers. Default: /tmp/dht.db")
    parser.add_argument("--bootstrap_node", default=None, type=str,
                        help="Node to connect to for bootstrapping this node. Leave unset to use the default ones. "
                             "Format: host:port Example: lbrynet1.lbry.com:4444")
    parser.add_argument("--metrics_port", default=0, type=int, help="Port for Prometheus metrics. 0 to disable. Default: 0")
    parser.add_argument("--enable_csv_export", action='store_true', help="Enable CSV endpoints on metrics server.")
    args = parser.parse_args()
    asyncio.run(main(args.host, args.port, args.db_file, args.bootstrap_node, args.metrics_port, args.enable_csv_export))