2018-02-20 19:46:17 +01:00
|
|
|
import struct
|
2016-08-10 15:29:44 +02:00
|
|
|
import io
|
|
|
|
|
|
|
|
from Crypto.PublicKey import RSA
|
2018-03-27 23:35:31 +02:00
|
|
|
from twisted.internet import defer, error
|
2018-03-14 23:12:40 +01:00
|
|
|
from twisted.python.failure import Failure
|
2016-08-10 15:29:44 +02:00
|
|
|
|
2018-03-14 23:12:40 +01:00
|
|
|
from lbrynet.core.client.ClientRequest import ClientRequest
|
|
|
|
from lbrynet.core.Error import RequestCanceledError
|
2016-10-20 18:23:39 +02:00
|
|
|
from lbrynet.core import BlobAvailability
|
2018-02-20 19:46:17 +01:00
|
|
|
from lbrynet.core.utils import generate_id
|
2017-06-26 03:04:33 +02:00
|
|
|
from lbrynet.daemon import ExchangeRateManager as ERM
|
2017-01-17 04:23:20 +01:00
|
|
|
from lbrynet import conf
|
2018-02-20 19:46:17 +01:00
|
|
|
from util import debug_kademlia_packet
|
2016-08-10 15:29:44 +02:00
|
|
|
|
2016-11-04 17:36:43 +01:00
|
|
|
KB = 2**10  # one kibibyte; default chunk size used by GenFile._generate_chunk
|
|
|
|
|
2017-02-10 16:56:22 +01:00
|
|
|
|
|
|
|
class FakeLBRYFile(object):
    """Bare-bones stand-in for an LBRY file used by the download tests."""

    def __init__(self, blob_manager, stream_info_manager, stream_hash, uri="fake_uri"):
        # 'uri' is accepted for signature compatibility with the real class
        # but is deliberately not stored.
        self.blob_manager = blob_manager
        self.stream_info_manager = stream_info_manager
        self.stream_hash = stream_hash
        self.file_name = 'fake_lbry_file'
|
2017-02-10 16:56:22 +01:00
|
|
|
|
2018-02-20 19:46:17 +01:00
|
|
|
|
2016-08-10 15:29:44 +02:00
|
|
|
class Node(object):
    """Mock DHT node exposing only what the session code touches."""

    def __init__(self, peer_finder=None, peer_manager=None, **kwargs):
        # Extra keyword arguments are accepted and ignored so callers can
        # pass the real Node's full constructor signature.
        self.peer_finder = peer_finder
        self.peer_manager = peer_manager
        self.peerPort = 3333

    def joinNetwork(self, *args):
        """Pretend to join the DHT; resolves immediately."""
        return defer.succeed(True)

    def stop(self):
        """Pretend to shut down; resolves immediately."""
        return defer.succeed(None)
|
2016-08-10 15:29:44 +02:00
|
|
|
|
|
|
|
|
2017-02-21 19:47:47 +01:00
|
|
|
class FakeNetwork(object):
    """Stub wallet network that reports a fixed block height of 1."""

    @staticmethod
    def get_local_height():
        """Local chain height is always 1."""
        return 1

    @staticmethod
    def get_server_height():
        """Server chain height is always 1 (i.e. fully synced)."""
        return 1
|
|
|
|
|
|
|
|
|
2017-05-31 20:15:15 +02:00
|
|
|
class BTCLBCFeed(ERM.MarketFeed):
    """Dummy BTC->LBC market feed wired up with placeholder parameters."""

    def __init__(self):
        # name, market name, url, params, fee -- all placeholders.
        ERM.MarketFeed.__init__(self, "BTCLBC", "market name", "derp.com", None, 0.0)
|
|
|
|
|
|
|
|
class USDBTCFeed(ERM.MarketFeed):
    """Dummy USD->BTC market feed wired up with placeholder parameters."""

    def __init__(self):
        # name, market name, url, params, fee -- all placeholders.
        ERM.MarketFeed.__init__(self, "USDBTC", "market name", "derp.com", None, 0.0)
|
|
|
|
|
|
|
|
class ExchangeRateManager(ERM.ExchangeRateManager):
    """ExchangeRateManager whose feeds are seeded from a fixed rates dict
    instead of being fetched; the base-class __init__ is skipped on purpose.
    """

    def __init__(self, market_feeds, rates):
        self.market_feeds = market_feeds
        for feed in self.market_feeds:
            market_rates = rates[feed.market]
            feed.rate = ERM.ExchangeRate(
                feed.market, market_rates['spot'], market_rates['ts'])
|
|
|
|
|
|
|
|
|
2018-03-27 23:35:31 +02:00
|
|
|
class PointTraderKeyExchanger(object):
    """Exchanges RSA public keys with peers over the client protocol,
    sending our wallet's key once per protocol instance."""

    def __init__(self, wallet):
        self.wallet = wallet
        self._protocols = []

    def send_next_request(self, peer, protocol):
        """Send our public key over *protocol* unless we already have.

        Returns a deferred firing True if a request was queued, False if
        this protocol was already handled.
        """
        if protocol in self._protocols:
            return defer.succeed(False)
        request = ClientRequest({'public_key': self.wallet.encoded_public_key},
                                'public_key')
        d = protocol.add_request(request)
        d.addCallback(self._handle_exchange_response, peer, request, protocol)
        d.addErrback(self._request_failed, peer)
        self._protocols.append(protocol)
        return defer.succeed(True)

    def _handle_exchange_response(self, response_dict, peer, request, protocol):
        # Record the peer's public key on the wallet once it responds.
        assert request.response_identifier in response_dict, \
            "Expected %s in dict but did not get it" % request.response_identifier
        assert protocol in self._protocols, "Responding protocol is not in our list of protocols"
        peer_pub_key = response_dict[request.response_identifier]
        self.wallet.set_public_key_for_peer(peer, peer_pub_key)
        return True

    def _request_failed(self, err, peer):
        # Cancellation is expected (e.g. on shutdown); anything else propagates.
        if not err.check(RequestCanceledError):
            return err
|
|
|
|
|
|
|
|
|
2018-03-27 23:35:31 +02:00
|
|
|
class PointTraderKeyQueryHandlerFactory(object):
    """Factory producing PointTraderKeyQueryHandler instances bound to a wallet."""

    def __init__(self, wallet):
        self.wallet = wallet

    def build_query_handler(self):
        """Return a fresh query handler sharing this factory's wallet."""
        return PointTraderKeyQueryHandler(self.wallet)

    def get_primary_query_identifier(self):
        """The query field this handler answers."""
        return 'public_key'

    def get_description(self):
        """Human-readable description of the query this handler serves."""
        return ("Point Trader Address - an address for receiving payments on the "
                "point trader testing network")
|
|
|
|
|
|
|
|
|
2018-03-27 23:35:31 +02:00
|
|
|
class PointTraderKeyQueryHandler(object):
    """Answers 'public_key' queries: validates and stores the client's key,
    then replies with our wallet's encoded public key."""

    def __init__(self, wallet):
        self.wallet = wallet
        self.query_identifiers = ['public_key']
        self.public_key = None
        self.peer = None

    def register_with_request_handler(self, request_handler, peer):
        """Bind this handler to *peer* and register its query identifiers."""
        self.peer = peer
        request_handler.register_query_handler(self, self.query_identifiers)

    def handle_queries(self, queries):
        """Process a batch of queries; returns a deferred response dict."""
        key_field = self.query_identifiers[0]
        if key_field not in queries:
            # No key in this batch: that is only acceptable if the peer has
            # already sent one on a previous request.
            if self.public_key is None:
                return defer.fail(Failure(ValueError("Expected but did not receive a public key")))
            return defer.succeed({})
        new_encoded_pub_key = queries[key_field]
        try:
            # Validate the key material before trusting it.
            RSA.importKey(new_encoded_pub_key)
        except (ValueError, TypeError, IndexError):
            return defer.fail(Failure(ValueError("Client sent an invalid public key")))
        self.public_key = new_encoded_pub_key
        self.wallet.set_public_key_for_peer(self.peer, self.public_key)
        return defer.succeed({'public_key': self.wallet.encoded_public_key})
|
|
|
|
|
2017-05-31 20:15:15 +02:00
|
|
|
|
2016-08-10 15:29:44 +02:00
|
|
|
class Wallet(object):
    """Mock wallet: generates a throwaway RSA keypair and answers every
    payment/claim call with a fixed, successful result."""

    def __init__(self):
        # A real (small) RSA key so key-exchange code paths can round-trip it.
        self.private_key = RSA.generate(1024)
        self.encoded_public_key = self.private_key.publickey().exportKey()
        # Attributes mirrored from the real wallet, all inert here.
        self._config = None
        self.network = None
        self.wallet = None
        self.is_first_run = False
        self.printed_retrieving_headers = False
        self._start_check = None
        self._catch_up_check = None
        self._caught_up_counter = 0
        self._lag_counter = 0
        self.blocks_behind = 0
        self.catchup_progress = 0
        self.max_behind = 0

    def start(self):
        return defer.succeed(True)

    def stop(self):
        return defer.succeed(True)

    def get_info_exchanger(self):
        """Return a key exchanger bound to this wallet."""
        return PointTraderKeyExchanger(self)

    def get_wallet_info_query_handler_factory(self):
        """Return a query-handler factory bound to this wallet."""
        return PointTraderKeyQueryHandlerFactory(self)

    def reserve_points(self, *args):
        # Reservations always succeed in the mock.
        return True

    def cancel_point_reservation(self, *args):
        pass

    def send_points(self, *args):
        return defer.succeed(True)

    def add_expected_payment(self, *args):
        pass

    def get_balance(self):
        # Plenty of funds for any test scenario.
        return defer.succeed(1000)

    def set_public_key_for_peer(self, peer, public_key):
        pass

    def get_claim_metadata_for_sd_hash(self, sd_hash):
        # (uri, claim_id, nout) -- fixed fake claim data.
        return "fakeuri", "aa04a949348f9f094d503e5816f0cfb57ee68a22f6d08d149217d071243e0377", 1

    def get_claimid(self, name, txid=None, nout=None):
        return "aa04a949348f9f094d503e5816f0cfb57ee68a22f6d08d149217d071243e0378"
|
2017-02-16 15:12:16 +01:00
|
|
|
|
2016-08-10 15:29:44 +02:00
|
|
|
|
|
|
|
class PeerFinder(object):
    """Mock peer finder that hands out localhost peers on a rotating
    sequence of ports starting at *start_port*."""

    def __init__(self, start_port, peer_manager, num_peers):
        self.start_port = start_port
        self.peer_manager = peer_manager
        self.num_peers = num_peers
        self.count = 0

    def find_peers_for_blob(self, h, filter_self=False):
        """Return one 127.0.0.1 peer; the port cycles through num_peers values."""
        peer_port = self.start_port + self.count
        self.count += 1
        if self.count >= self.num_peers:
            self.count = 0
        return defer.succeed([self.peer_manager.get_peer("127.0.0.1", peer_port)])

    def run_manage_loop(self):
        pass

    def stop(self):
        pass
|
|
|
|
|
|
|
|
|
|
|
|
class Announcer(object):
    """Do-nothing hash announcer: accepts every call and reports an
    empty queue and an immediate next-announce time."""

    def __init__(self, *args):
        pass

    def hash_queue_size(self):
        """Nothing is ever queued."""
        return 0

    def add_supplier(self, supplier):
        pass

    def immediate_announce(self, *args):
        pass

    def run_manage_loop(self):
        pass

    def stop(self):
        pass

    def get_next_announce_time(self):
        """Announce 'now' -- no scheduling in the mock."""
        return 0
|
|
|
|
|
2016-08-10 15:29:44 +02:00
|
|
|
|
|
|
|
class GenFile(io.RawIOBase):
    """Read-only file-like object that deterministically generates *size*
    bytes by repeating *pattern*, without materialising the whole content.

    Used as a stand-in for real file data in stream tests.
    """

    def __init__(self, size, pattern):
        io.RawIOBase.__init__(self)
        self.size = size          # total number of bytes this 'file' yields
        self.pattern = pattern    # byte string repeated to fill the file
        self.read_so_far = 0      # bytes already handed out by read()
        self.buff = b''           # leftover generated bytes not yet consumed
        self.last_offset = 0      # position within pattern for the next chunk
        self.name = "."           # real files have a name; some callers read it

    def readable(self):
        return True

    def writable(self):
        return False

    def read(self, n=-1):
        """Return up to n bytes (all remaining bytes if n < 0)."""
        if n > -1:
            bytes_to_read = min(n, self.size - self.read_so_far)
        else:
            bytes_to_read = self.size - self.read_so_far
        # Drain the leftover buffer first, then generate fresh chunks.
        output, self.buff = self.buff[:bytes_to_read], self.buff[bytes_to_read:]
        bytes_to_read -= len(output)
        while bytes_to_read > 0:
            self.buff = self._generate_chunk()
            new_output, self.buff = self.buff[:bytes_to_read], self.buff[bytes_to_read:]
            bytes_to_read -= len(new_output)
            output += new_output
        self.read_so_far += len(output)
        return output

    def readall(self):
        return self.read()

    def _generate_chunk(self, size=KB):
        """Produce the next *size* bytes of the repeating pattern."""
        output = self.pattern[self.last_offset:self.last_offset + size]
        n_left = size - len(output)
        # BUGFIX: use floor division. Plain '/' is true division on Python 3,
        # producing a float and making 'pattern * whole_patterns' raise a
        # TypeError; '//' is identical for ints on Python 2.
        whole_patterns = n_left // len(self.pattern)
        output += self.pattern * whole_patterns
        self.last_offset = size - len(output)
        output += self.pattern[:self.last_offset]
        return output
|
|
|
|
|
|
|
|
|
2016-10-20 18:23:39 +02:00
|
|
|
class BlobAvailabilityTracker(BlobAvailability.BlobAvailabilityTracker):
    """
    Class to track peer counts for known blobs, and to discover new popular blobs

    Attributes:
        availability (dict): dictionary of peers for known blobs
    """

    def __init__(self, blob_manager=None, peer_finder=None, dht_node=None):
        # Canned availability data: blob hash -> list of peer addresses.
        # The nth entry has n+1 (duplicate) peers, giving tests a spread of
        # popularity from 1 to 10 peers.
        self.availability = {
            '91dc64cf1ff42e20d627b033ad5e4c3a4a96856ed8a6e3fb'
            '4cd5fa1cfba4bf72eefd325f579db92f45f4355550ace8e7': ['1.2.3.4'],
            'b2e48bb4c88cf46b76adf0d47a72389fae0cd1f19ed27dc5'
            '09138c99509a25423a4cef788d571dca7988e1dca69e6fa0': ['1.2.3.4', '1.2.3.4'],
            '6af95cd062b4a179576997ef1054c9d2120f8592eea045e9'
            '667bea411d520262cd5a47b137eabb7a7871f5f8a79c92dd':
                ['1.2.3.4', '1.2.3.4', '1.2.3.4'],
            '6d8017aba362e5c5d0046625a039513419810a0397d72831'
            '8c328a5cc5d96efb589fbca0728e54fe5adbf87e9545ee07':
                ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
            '5a450b416275da4bdff604ee7b58eaedc7913c5005b7184f'
            'c3bc5ef0b1add00613587f54217c91097fc039ed9eace9dd':
                ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
            'd7c82e6cac093b3f16107d2ae2b2c75424f1fcad2c7fbdbe'
            '66e4a13c0b6bd27b67b3a29c403b82279ab0f7c1c48d6787':
                ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
            '9dbda74a472a2e5861a5d18197aeba0f5de67c67e401124c'
            '243d2f0f41edf01d7a26aeb0b5fc9bf47f6361e0f0968e2c':
                ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
            '8c70d5e2f5c3a6085006198e5192d157a125d92e73787944'
            '72007a61947992768926513fc10924785bdb1761df3c37e6':
                ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4',
                 '1.2.3.4'],
            'f99d24cd50d4bfd77c2598bfbeeb8415bf0feef21200bdf0'
            'b8fbbde7751a77b7a2c68e09c25465a2f40fba8eecb0b4e0':
                ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4',
                 '1.2.3.4', '1.2.3.4'],
            'c84aa1fd8f5009f7c4e71e444e40d95610abc1480834f835'
            'eefb267287aeb10025880a3ce22580db8c6d92efb5bc0c9c':
                ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4',
                 '1.2.3.4', '1.2.3.4', '1.2.3.4'],
        }
        # Constructor arguments are accepted for interface compatibility but
        # deliberately ignored; fixed stand-ins are assigned instead.
        self._blob_manager = None
        # NOTE(review): 11224 is passed where PeerFinder expects peer_manager;
        # it looks like a port number -- confirm this is intentional.
        self._peer_finder = PeerFinder(11223, 11224, 2)
        self._dht_node = None
        self._check_popular = None
        self._check_mine = None
        # Inherited from BlobAvailability.BlobAvailabilityTracker; derives the
        # mean peer count from self.availability.
        self._set_mean_peers()

    def start(self):
        # No loops to start in the mock.
        pass

    def stop(self):
        # Nothing to tear down.
        pass
|
|
|
|
|
|
|
|
|
|
|
|
|
2016-08-10 15:29:44 +02:00
|
|
|
# Canned stream descriptor (SD) blob for a three-blob 'lbryfile' stream plus
# the zero-length terminator blob. The hex-encoded 'stream_name' and
# 'suggested_file_name' both decode to 'test_file'.
create_stream_sd_file = {
    'stream_name': '746573745f66696c65',
    'blobs': [
        {
            'length': 2097152,
            'blob_num': 0,
            'blob_hash': 'dc4708f76a5e7af0f1cae0ee96b824e2ed9250c9346c093b'
                         '441f0a20d3607c17948b6fcfb4bc62020fe5286693d08586',
            'iv': '30303030303030303030303030303031'
        },
        {
            'length': 2097152,
            'blob_num': 1,
            'blob_hash': 'f4067522c1b49432a2a679512e3917144317caa1abba0c04'
                         '1e0cd2cf9f635d4cf127ce1824fa04189b63916174951f70',
            'iv': '30303030303030303030303030303032'
        },
        {
            'length': 1015056,
            'blob_num': 2,
            'blob_hash': '305486c434260484fcb2968ce0e963b72f81ba56c11b08b1'
                         'af0789b55b44d78422600f9a38e3cf4f2e9569897e5646a9',
            'iv': '30303030303030303030303030303033'
        },
        # Zero-length terminator blob marking the end of the stream.
        {'length': 0, 'blob_num': 3, 'iv': '30303030303030303030303030303034'}
    ],
    'stream_type': 'lbryfile',
    'key': '30313233343536373031323334353637',
    'suggested_file_name': '746573745f66696c65',
    'stream_hash': '6d27fbe10c86d81aacfb897c7a426d0a2214f5a299455a6d'
                   '315c0f998c4b3545c2dc60906122d94653c23b1898229e3f'
}
|
2017-01-17 04:23:20 +01:00
|
|
|
|
|
|
|
|
|
|
|
def mock_conf_settings(obj, settings=None):
    """Swap in a fresh conf.settings for the duration of a test.

    obj: the TestCase; a cleanup is registered on it to restore the
        original settings when the test finishes.
    settings: optional dict of overrides applied on top of the defaults.
    """
    # BUGFIX: the default used to be a mutable dict literal ({}), which is
    # shared across calls; None is the safe sentinel.
    settings = settings or {}
    original_settings = conf.settings
    conf.settings = conf.Config(conf.FIXED_SETTINGS, conf.ADJUSTABLE_SETTINGS)
    conf.settings.installation_id = conf.settings.get_installation_id()
    conf.settings.node_id = conf.settings.get_node_id()
    conf.settings.update(settings)

    def _reset_settings():
        conf.settings = original_settings

    obj.addCleanup(_reset_settings)
|
2017-05-31 20:15:15 +02:00
|
|
|
|
|
|
|
|
2018-02-20 19:46:17 +01:00
|
|
|
# Fixed 96-hex-character (48-byte) node ids for the first three mock DHT
# nodes; mock_node_generator falls back to random ids beyond this list.
MOCK_DHT_NODES = [
    "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
    "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
    "DEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF",
]
|
|
|
|
|
|
|
|
# Hostname -> IP table consumed by the mock resolve(); these map to mock
# nodes 0, 1, and 2 (the addresses address_generator starts from).
MOCK_DHT_SEED_DNS = {  # these map to mock nodes 0, 1, and 2
    "lbrynet1.lbry.io": "10.42.42.1",
    "lbrynet2.lbry.io": "10.42.42.2",
    "lbrynet3.lbry.io": "10.42.42.3",
}
|
|
|
|
|
|
|
|
|
|
|
|
def resolve(name, timeout=(1, 3, 11, 45)):
    """Mock DNS resolver backed by MOCK_DHT_SEED_DNS.

    Mirrors twisted's resolver signature; *timeout* is accepted but unused.
    Returns a deferred IP string, or a failed deferred with DNSLookupError
    for unknown hostnames.
    """
    try:
        ip_address = MOCK_DHT_SEED_DNS[name]
    except KeyError:
        return defer.fail(error.DNSLookupError(name))
    return defer.succeed(ip_address)
|
|
|
|
|
|
|
|
|
|
|
|
class MockUDPTransport(object):
    """In-process stand-in for a UDP transport: 'sent' datagrams are
    delivered synchronously to the destination protocol registered in
    MockNetwork.peers."""

    def __init__(self, address, port, max_packet_size, protocol):
        self.address = address
        self.port = port
        self.max_packet_size = max_packet_size
        self._node = protocol._node

    def write(self, data, address):
        """Deliver *data* straight to the protocol listening at *address*."""
        destination_protocol, _ = MockNetwork.peers[address]
        debug_kademlia_packet(data, (self.address, self.port), address, self._node)
        destination_protocol.datagramReceived(data, (self.address, self.port))
|
2017-05-31 20:15:15 +02:00
|
|
|
|
2018-02-20 19:46:17 +01:00
|
|
|
|
|
|
|
class MockUDPPort(object):
    """Stand-in for twisted's listening UDP port: start/stop simply
    forward to the wrapped protocol's lifecycle hooks."""

    def __init__(self, protocol):
        self.protocol = protocol

    def startListening(self, reason=None):
        """Invoke the protocol's startProtocol hook."""
        return self.protocol.startProtocol()

    def stopListening(self, reason=None):
        """Invoke the protocol's stopProtocol hook."""
        return self.protocol.stopProtocol()
|
|
|
|
|
|
|
|
|
|
|
|
class MockNetwork(object):
    """Registry of in-process 'UDP' peers, shared across the mock stack."""

    # (interface, port) -> (protocol, max_packet_size)
    peers = {}

    @classmethod
    def add_peer(cls, port, protocol, interface, maxPacketSize):
        """Register *protocol* and attach a MockUDPTransport to it.

        The *interface* argument is overridden with the node's own
        externalIP so packets are addressed the way the mock DHT expects.
        """
        interface = protocol._node.externalIP
        protocol.transport = MockUDPTransport(interface, port, maxPacketSize, protocol)
        cls.peers[(interface, port)] = (protocol, maxPacketSize)
|
2018-02-20 19:46:17 +01:00
|
|
|
|
|
|
|
|
|
|
|
def listenUDP(port, protocol, interface='', maxPacketSize=8192):
    """Mock of twisted's reactor.listenUDP: registers the protocol with
    the in-process MockNetwork and returns a mock listening port."""
    MockNetwork.add_peer(port, protocol, interface, maxPacketSize)
    return MockUDPPort(protocol)
|
|
|
|
|
|
|
|
|
|
|
|
def address_generator(address=(10, 42, 42, 1)):
    """Yield dotted-quad IPv4 address strings, starting at *address* and
    incrementing by one each time (carrying across octets).

    BUGFIX: the previous implementation round-tripped the octets through
    struct with a native-endian "I" format and a str-of-chr buffer, which
    was byte-order dependent and Python-2-only (struct.unpack requires
    bytes on Python 3). Plain integer arithmetic is portable and produces
    the same sequence.
    """
    def increment(addr):
        # Treat the four octets as one 32-bit big-endian integer, add one,
        # then split back into octets.
        a, b, c, d = addr
        value = ((a << 24) | (b << 16) | (c << 8) | d) + 1
        return ((value >> 24) & 0xFF, (value >> 16) & 0xFF,
                (value >> 8) & 0xFF, value & 0xFF)

    while True:
        yield "{}.{}.{}.{}".format(*address)
        address = increment(address)
|
|
|
|
|
|
|
|
|
|
|
|
def mock_node_generator(count=None, mock_node_ids=MOCK_DHT_NODES):
    """Yield (node_id, ip) pairs for mock DHT nodes.

    The first entries use the fixed ids in *mock_node_ids*; once those are
    exhausted, random hex-encoded ids are generated. Yields *count* pairs,
    or indefinitely when count is falsy.
    """
    if mock_node_ids is None:
        mock_node_ids = MOCK_DHT_NODES

    for num, node_ip in enumerate(address_generator()):
        if count and num >= count:
            break
        try:
            node_id = mock_node_ids[num]
        except IndexError:
            # Ran out of fixed ids; fabricate a random one.
            node_id = generate_id().encode('hex')
        yield (node_id, node_ip)
|