"""A simple script that attempts to directly download a single blob or stream from a given peer"""
import argparse
import logging
import sys
import tempfile
import time
import shutil
from pprint import pprint

# install the asyncio reactor before twisted.internet.reactor is imported below
from twisted.internet import asyncioreactor
asyncioreactor.install()
from twisted.internet import defer, threads, reactor

from lbrynet import conf, log_support
from lbrynet.p2p import Peer
from lbrynet.p2p.SinglePeerDownloader import SinglePeerDownloader
from lbrynet.p2p.StreamDescriptor import BlobStreamDescriptorReader
from lbrynet.p2p.BlobManager import DiskBlobManager
from lbrynet.extras.daemon.Components import f2d
from lbrynet.extras.daemon.storage import SQLiteStorage
from lbrynet.extras.wallet import LbryWalletManager

log = logging.getLogger()
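
# Usage (hypothetical values; script name omitted on purpose):
#   python <this_script> <peer_host>[:<peer_port>] <blob_hash> [--timeout SECONDS]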
def main(args=None):
    conf.initialize_settings()
    parser = argparse.ArgumentParser()
    parser.add_argument('peer')
    parser.add_argument('blob_hash')
    parser.add_argument('--timeout', type=int, default=30)
    args = parser.parse_args(args)

    log_support.configure_console(level='DEBUG')
    log_support.configure_twisted()

    if ":" in str(args.peer):
        host, port = str(args.peer).strip().split(":")
    else:
        host = args.peer
        port = 3333  # default LBRY peer port

    d = download_it(Peer.Peer(host, int(port)), args.timeout, args.blob_hash)
    d.addErrback(log.exception)
    d.addBoth(lambda _: reactor.callLater(0, reactor.stop))
    reactor.run()


@defer.inlineCallbacks
def download_it(peer, timeout, blob_hash):
    # everything is downloaded into a temporary directory with its own storage and blob manager
    tmp_dir = yield threads.deferToThread(tempfile.mkdtemp)
    storage = SQLiteStorage(tmp_dir, reactor)
    yield storage.setup()
    tmp_blob_manager = DiskBlobManager(tmp_dir, storage)
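
    # minimal throw-away wallet config: temporary wallet dir, no keyring, no lbryum servers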
    config = {'auto_connect': True}
    config['wallet_dir'] = tempfile.mkdtemp()
    config['use_keyring'] = False
    config['blockchain_name'] = conf.settings['blockchain_name']
    config['lbryum_servers'] = []
    wallet = yield f2d(LbryWalletManager.from_lbrynet_config(config, storage))
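
    # set up a single-peer downloader backed by the throw-away wallet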
    downloader = SinglePeerDownloader()
    downloader.setup(wallet)
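
    # fetch the blob into the temporary blob manager; tmp_dir is removed in the finally block below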
    try:
        blob_downloaded = yield downloader.download_blob_from_peer(
            peer, timeout, blob_hash, tmp_blob_manager
        )
        if blob_downloaded:
            log.info("SUCCESS!")
            blob = yield tmp_blob_manager.get_blob(blob_hash)
            pprint(blob)
            if not blob.verified:
                log.error("downloaded blob failed verification")
            else:
                reader = BlobStreamDescriptorReader(blob)
                info = None
                for _ in range(3):
                    try:
                        info = yield reader.get_info()
                    except ValueError:
                        pass
                    if info:
                        break
                    # the blob is sometimes not yet written to disk when we try to read it,
                    # so wait briefly and retry reading the stream descriptor
                    time.sleep(0.1)
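
                # the stream descriptor lists the content blobs; fetch each one from the same peer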
                if info is not None:
                    pprint(info)
                    for content_blob in info['blobs']:
                        if 'blob_hash' in content_blob:
                            yield download_it(peer, timeout, content_blob['blob_hash'])
        else:
            log.error("Download failed")
    finally:
        yield tmp_blob_manager.stop()
        yield threads.deferToThread(shutil.rmtree, tmp_dir)

    defer.returnValue(True)


if __name__ == '__main__':
    sys.exit(main())