forked from LBRYCommunity/lbry-sdk
fixed all pylint errors
This commit is contained in:
parent 2abe85ba25
commit 91498822ae
27 changed files with 336 additions and 234 deletions
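Most of the changes below are the same two mechanical pylint fixes applied across the test suite: adding a space after commas, and wrapping over-long lines, usually by splitting 96-character blob-hash literals with ''.join((...)). A minimal sketch of that wrapping idiom, using one of the hashes that appears in the diff (the variable names here are illustrative only, not part of the commit):

# Illustrative only: the same blob hash written on one line and reflowed with
# ''.join((...)) so the source line stays under pylint's line-length limit.
SINGLE_LINE = '91dc64cf1ff42e20d627b033ad5e4c3a4a96856ed8a6e3fb4cd5fa1cfba4bf72eefd325f579db92f45f4355550ace8e7'
REFLOWED = ''.join(('91dc64cf1ff42e20d627b033ad5e4c3a4a96856ed8a6e3fb',
                    '4cd5fa1cfba4bf72eefd325f579db92f45f4355550ace8e7'))
assert SINGLE_LINE == REFLOWED  # identical value, different layout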
@@ -285,7 +285,7 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event,
ds.append(lbry_file_manager.stop())
if server_port:
ds.append(server_port.stopListening())
ds.append(rm_db_and_blob_dir(db_dir, blob_dir))
ds.append(rm_db_and_blob_dir(db_dir, blob_dir))
kill_check.stop()
dead_event.set()
dl = defer.DeferredList(ds)
@@ -1,7 +1,3 @@
import os
import shutil
import tempfile

from twisted.internet import defer, threads, error
from twisted.trial import unittest
@@ -73,7 +69,8 @@ class TestReflector(unittest.TestCase):
dht_node_class=Node
)

self.stream_info_manager = EncryptedFileMetadataManager.DBEncryptedFileMetadataManager(self.db_dir)
self.stream_info_manager = EncryptedFileMetadataManager.DBEncryptedFileMetadataManager(
self.db_dir)

self.lbry_file_manager = EncryptedFileManager.EncryptedFileManager(
self.session, self.stream_info_manager, sd_identifier)
@@ -81,7 +78,8 @@ class TestReflector(unittest.TestCase):
self.server_db_dir, self.server_blob_dir = mk_db_and_blob_dir()
self.server_blob_manager = BlobManager.DiskBlobManager(
hash_announcer, self.server_blob_dir, self.server_db_dir)
self.server_stream_info_manager = EncryptedFileMetadataManager.DBEncryptedFileMetadataManager(self.server_db_dir)
self.server_stream_info_manager = \
EncryptedFileMetadataManager.DBEncryptedFileMetadataManager(self.server_db_dir)

d = self.session.setup()
@@ -124,7 +122,8 @@ class TestReflector(unittest.TestCase):
return d

def start_server():
server_factory = reflector.ServerFactory(peer_manager, self.server_blob_manager, self.server_stream_info_manager)
server_factory = reflector.ServerFactory(
peer_manager, self.server_blob_manager, self.server_stream_info_manager)
from twisted.internet import reactor
port = 8943
while self.reflector_port is None:
@@ -181,7 +180,8 @@ class TestReflector(unittest.TestCase):
blob_hashes = [b[0] for b in blobs if b[0] is not None]
expected_blob_hashes = [b[0] for b in self.expected_blobs[:-1] if b[0] is not None]
self.assertEqual(expected_blob_hashes, blob_hashes)
sd_hashes = yield self.server_stream_info_manager.get_sd_blob_hashes_for_stream(self.stream_hash)
sd_hashes = yield self.server_stream_info_manager.get_sd_blob_hashes_for_stream(
self.stream_hash)
self.assertEqual(1, len(sd_hashes))
expected_sd_hash = self.expected_blobs[-1][0]
self.assertEqual(self.sd_hash, sd_hashes[0])
@@ -309,7 +309,8 @@ class TestReflector(unittest.TestCase):
blob_hashes = [b[0] for b in blobs if b[0] is not None]
expected_blob_hashes = [b[0] for b in self.expected_blobs[:-1] if b[0] is not None]
self.assertEqual(expected_blob_hashes, blob_hashes)
sd_hashes = yield self.server_stream_info_manager.get_sd_blob_hashes_for_stream(self.stream_hash)
sd_hashes = yield self.server_stream_info_manager.get_sd_blob_hashes_for_stream(
self.stream_hash)
self.assertEqual(1, len(sd_hashes))
expected_sd_hash = self.expected_blobs[-1][0]
self.assertEqual(self.sd_hash, sd_hashes[0])
@@ -17,7 +17,7 @@ from lbrynet.file_manager.EncryptedFileCreator import create_lbry_file
from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.lbry_file.StreamDescriptor import get_sd_info
from lbrynet.core.PeerManager import PeerManager
from lbrynet.core.RateLimiter import DummyRateLimiter, RateLimiter
from lbrynet.core.RateLimiter import DummyRateLimiter

from lbrynet.tests import mocks
@@ -1,7 +1,6 @@
import io

from Crypto.PublicKey import RSA
from decimal import Decimal
from twisted.internet import defer

from lbrynet.core import PTCWallet
@@ -220,16 +219,37 @@ class BlobAvailabilityTracker(BlobAvailability.BlobAvailabilityTracker):
|
|||
|
||||
def __init__(self, blob_manager=None, peer_finder=None, dht_node=None):
|
||||
self.availability = {
|
||||
'91dc64cf1ff42e20d627b033ad5e4c3a4a96856ed8a6e3fb4cd5fa1cfba4bf72eefd325f579db92f45f4355550ace8e7': ['1.2.3.4'],
|
||||
'b2e48bb4c88cf46b76adf0d47a72389fae0cd1f19ed27dc509138c99509a25423a4cef788d571dca7988e1dca69e6fa0': ['1.2.3.4', '1.2.3.4'],
|
||||
'6af95cd062b4a179576997ef1054c9d2120f8592eea045e9667bea411d520262cd5a47b137eabb7a7871f5f8a79c92dd': ['1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
'6d8017aba362e5c5d0046625a039513419810a0397d728318c328a5cc5d96efb589fbca0728e54fe5adbf87e9545ee07': ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
'5a450b416275da4bdff604ee7b58eaedc7913c5005b7184fc3bc5ef0b1add00613587f54217c91097fc039ed9eace9dd': ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
'd7c82e6cac093b3f16107d2ae2b2c75424f1fcad2c7fbdbe66e4a13c0b6bd27b67b3a29c403b82279ab0f7c1c48d6787': ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
'9dbda74a472a2e5861a5d18197aeba0f5de67c67e401124c243d2f0f41edf01d7a26aeb0b5fc9bf47f6361e0f0968e2c': ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
'8c70d5e2f5c3a6085006198e5192d157a125d92e7378794472007a61947992768926513fc10924785bdb1761df3c37e6': ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
'f99d24cd50d4bfd77c2598bfbeeb8415bf0feef21200bdf0b8fbbde7751a77b7a2c68e09c25465a2f40fba8eecb0b4e0': ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
'c84aa1fd8f5009f7c4e71e444e40d95610abc1480834f835eefb267287aeb10025880a3ce22580db8c6d92efb5bc0c9c': ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
''.join(('91dc64cf1ff42e20d627b033ad5e4c3a4a96856ed8a6e3fb',
|
||||
'4cd5fa1cfba4bf72eefd325f579db92f45f4355550ace8e7')): ['1.2.3.4'],
|
||||
''.join(('b2e48bb4c88cf46b76adf0d47a72389fae0cd1f19ed27dc5',
|
||||
'09138c99509a25423a4cef788d571dca7988e1dca69e6fa0')): ['1.2.3.4', '1.2.3.4'],
|
||||
''.join(('6af95cd062b4a179576997ef1054c9d2120f8592eea045e9',
|
||||
'667bea411d520262cd5a47b137eabb7a7871f5f8a79c92dd')):
|
||||
['1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
''.join(('6d8017aba362e5c5d0046625a039513419810a0397d72831',
|
||||
'8c328a5cc5d96efb589fbca0728e54fe5adbf87e9545ee07')):
|
||||
['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
''.join(('5a450b416275da4bdff604ee7b58eaedc7913c5005b7184f',
|
||||
'c3bc5ef0b1add00613587f54217c91097fc039ed9eace9dd')):
|
||||
['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
''.join(('d7c82e6cac093b3f16107d2ae2b2c75424f1fcad2c7fbdbe',
|
||||
'66e4a13c0b6bd27b67b3a29c403b82279ab0f7c1c48d6787')):
|
||||
['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
''.join(('9dbda74a472a2e5861a5d18197aeba0f5de67c67e401124c',
|
||||
'243d2f0f41edf01d7a26aeb0b5fc9bf47f6361e0f0968e2c')):
|
||||
['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
''.join(('8c70d5e2f5c3a6085006198e5192d157a125d92e73787944',
|
||||
'72007a61947992768926513fc10924785bdb1761df3c37e6')):
|
||||
['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4',
|
||||
'1.2.3.4'],
|
||||
''.join(('f99d24cd50d4bfd77c2598bfbeeb8415bf0feef21200bdf0',
|
||||
'b8fbbde7751a77b7a2c68e09c25465a2f40fba8eecb0b4e0')):
|
||||
['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4',
|
||||
'1.2.3.4', '1.2.3.4'],
|
||||
''.join(('c84aa1fd8f5009f7c4e71e444e40d95610abc1480834f835',
|
||||
'eefb267287aeb10025880a3ce22580db8c6d92efb5bc0c9c')):
|
||||
['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4',
|
||||
'1.2.3.4', '1.2.3.4', '1.2.3.4'],
|
||||
}
|
||||
self._blob_manager = None
|
||||
self._peer_finder = PeerFinder(11223, 11224, 2)
|
||||
|
@@ -252,19 +272,22 @@ create_stream_sd_file = {
|
|||
{
|
||||
'length': 2097152,
|
||||
'blob_num': 0,
|
||||
'blob_hash': 'dc4708f76a5e7af0f1cae0ee96b824e2ed9250c9346c093b441f0a20d3607c17948b6fcfb4bc62020fe5286693d08586',
|
||||
'blob_hash': ''.join(('dc4708f76a5e7af0f1cae0ee96b824e2ed9250c9346c093b',
|
||||
'441f0a20d3607c17948b6fcfb4bc62020fe5286693d08586')),
|
||||
'iv': '30303030303030303030303030303031'
|
||||
},
|
||||
{
|
||||
'length': 2097152,
|
||||
'blob_num': 1,
|
||||
'blob_hash': 'f4067522c1b49432a2a679512e3917144317caa1abba0c041e0cd2cf9f635d4cf127ce1824fa04189b63916174951f70',
|
||||
'blob_hash': ''.join(('f4067522c1b49432a2a679512e3917144317caa1abba0c04',
|
||||
'1e0cd2cf9f635d4cf127ce1824fa04189b63916174951f70')),
|
||||
'iv': '30303030303030303030303030303032'
|
||||
},
|
||||
{
|
||||
'length': 1015056,
|
||||
'blob_num': 2,
|
||||
'blob_hash': '305486c434260484fcb2968ce0e963b72f81ba56c11b08b1af0789b55b44d78422600f9a38e3cf4f2e9569897e5646a9',
|
||||
'blob_hash': ''.join(('305486c434260484fcb2968ce0e963b72f81ba56c11b08b1',
|
||||
'af0789b55b44d78422600f9a38e3cf4f2e9569897e5646a9')),
|
||||
'iv': '30303030303030303030303030303033'
|
||||
},
|
||||
{'length': 0, 'blob_num': 3, 'iv': '30303030303030303030303030303034'}
|
||||
|
@@ -272,7 +295,8 @@ create_stream_sd_file = {
|
|||
'stream_type': 'lbryfile',
|
||||
'key': '30313233343536373031323334353637',
|
||||
'suggested_file_name': '746573745f66696c65',
|
||||
'stream_hash': '6d27fbe10c86d81aacfb897c7a426d0a2214f5a299455a6d315c0f998c4b3545c2dc60906122d94653c23b1898229e3f'
|
||||
'stream_hash': ''.join(('6d27fbe10c86d81aacfb897c7a426d0a2214f5a299455a6d',
|
||||
'315c0f998c4b3545c2dc60906122d94653c23b1898229e3f'))
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@@ -1,20 +1,15 @@
import sys
import time
import logging

from lbrynet.core import log_support
from lbrynet.core.client.ClientRequest import ClientRequest
from lbrynet.core.server.ServerProtocol import ServerProtocol
from lbrynet.core.client.ClientProtocol import ClientProtocol
from lbrynet.core.RateLimiter import RateLimiter
from lbrynet.core.Peer import Peer
from lbrynet.core.PeerManager import PeerManager
from lbrynet.core.Error import ConnectionClosedBeforeResponseError, NoResponseError
from lbrynet.core.Error import NoResponseError

from twisted.trial import unittest
from twisted.internet import defer, reactor, task
from twisted.internet.task import deferLater
from twisted.internet.protocol import Protocol, ServerFactory
from twisted.internet.protocol import ServerFactory
from lbrynet import conf
from lbrynet.core import utils
from lbrynet.interfaces import IQueryHandlerFactory, IQueryHandler, IRequestCreator
@@ -153,7 +148,8 @@ class TestIntegrationConnectionManager(unittest.TestCase):
self.server_port = reactor.listenTCP(PEER_PORT, self.server, interface=LOCAL_HOST)
yield self.connection_manager.manage(schedule_next_call=False)
self.assertEqual(1, self.connection_manager.num_peer_connections())
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].factory.connection_was_made_deferred
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].\
factory.connection_was_made_deferred
self.assertEqual(0, self.connection_manager.num_peer_connections())
self.assertTrue(connection_made)
self.assertEqual(1, self.TEST_PEER.success_count)
@@ -166,7 +162,8 @@ class TestIntegrationConnectionManager(unittest.TestCase):
self.server_port = reactor.listenTCP(PEER_PORT, self.server, interface=LOCAL_HOST)
yield self.connection_manager.manage(schedule_next_call=False)
self.assertEqual(1, self.connection_manager.num_peer_connections())
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].factory.connection_was_made_deferred
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].\
factory.connection_was_made_deferred
self.assertEqual(0, self.connection_manager.num_peer_connections())
self.assertTrue(connection_made)
self.assertEqual(0, self.TEST_PEER.success_count)
@@ -179,7 +176,8 @@ class TestIntegrationConnectionManager(unittest.TestCase):
self._init_connection_manager()
yield self.connection_manager.manage(schedule_next_call=False)
self.assertEqual(1, self.connection_manager.num_peer_connections())
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].factory.connection_was_made_deferred
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].\
factory.connection_was_made_deferred
self.assertEqual(0, self.connection_manager.num_peer_connections())
self.assertFalse(connection_made)
self.assertEqual(0, self.connection_manager.num_peer_connections())
@@ -199,12 +197,14 @@ class TestIntegrationConnectionManager(unittest.TestCase):
self.assertEqual(2, self.connection_manager.num_peer_connections())
self.assertIn(self.TEST_PEER, self.connection_manager._peer_connections)
self.assertIn(test_peer2, self.connection_manager._peer_connections)
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].factory.connection_was_made_deferred
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].\
factory.connection_was_made_deferred
self.assertFalse(connection_made)
self.assertEqual(1, self.connection_manager.num_peer_connections())
self.assertEqual(0, self.TEST_PEER.success_count)
self.assertEqual(1, self.TEST_PEER.down_count)
connection_made = yield self.connection_manager._peer_connections[test_peer2].factory.connection_was_made_deferred
connection_made = yield self.connection_manager._peer_connections[test_peer2].\
factory.connection_was_made_deferred
self.assertFalse(connection_made)
self.assertEqual(0, self.connection_manager.num_peer_connections())
self.assertEqual(0, test_peer2.success_count)
@@ -227,19 +227,21 @@ class TestIntegrationConnectionManager(unittest.TestCase):
@defer.inlineCallbacks
def test_closed_connection_when_server_is_slow(self):
self._init_connection_manager()
self.server = MocServerProtocolFactory(self.clock, has_moc_query_handler=True,is_delayed=True)
self.server = MocServerProtocolFactory(
self.clock, has_moc_query_handler=True, is_delayed=True)
self.server_port = reactor.listenTCP(PEER_PORT, self.server, interface=LOCAL_HOST)

yield self.connection_manager.manage(schedule_next_call=False)
self.assertEqual(1, self.connection_manager.num_peer_connections())
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].factory.connection_was_made_deferred
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].\
factory.connection_was_made_deferred
self.assertEqual(0, self.connection_manager.num_peer_connections())
self.assertEqual(True, connection_made)
self.assertEqual(0, self.TEST_PEER.success_count)
self.assertEqual(1, self.TEST_PEER.down_count)

""" test header first seeks """
# test header first seeks
@defer.inlineCallbacks
def test_no_peer_for_head_blob(self):
# test that if we can't find blobs for the head blob,
@@ -253,7 +255,8 @@ class TestIntegrationConnectionManager(unittest.TestCase):

yield self.connection_manager.manage(schedule_next_call=False)
self.assertEqual(1, self.connection_manager.num_peer_connections())
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].factory.connection_was_made_deferred
connection_made = yield self.connection_manager._peer_connections[self.TEST_PEER].\
factory.connection_was_made_deferred
self.assertEqual(0, self.connection_manager.num_peer_connections())
self.assertTrue(connection_made)
self.assertEqual(1, self.TEST_PEER.success_count)
@@ -8,7 +8,8 @@ from twisted.trial import unittest
from lbrynet.core import Peer
from lbrynet.core.server import BlobRequestHandler
from lbrynet.core.PaymentRateManager import NegotiatedPaymentRateManager, BasePaymentRateManager
from lbrynet.tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker, mock_conf_settings
from lbrynet.tests.mocks\
import BlobAvailabilityTracker as DummyBlobAvailabilityTracker, mock_conf_settings

class TestBlobRequestHandlerQueries(unittest.TestCase):
@@ -1,16 +1,14 @@
from twisted.trial import unittest
from twisted.internet import defer,task
from twisted.internet import defer, task

from lbrynet.core.server.DHTHashAnnouncer import DHTHashAnnouncer,DHTHashSupplier
from lbrynet.core.utils import random_string
from lbrynet.core import log_support, utils
from lbrynet.core import utils
from lbrynet.tests.util import random_lbry_hash

class MocDHTNode(object):
def __init__(self):
self.blobs_announced = 0

def announceHaveBlob(self,blob,port):
def announceHaveBlob(self, blob, port):
self.blobs_announced += 1
return defer.succeed(True)
@@ -35,14 +33,14 @@ class DHTHashAnnouncerTest(unittest.TestCase):
self.clock = task.Clock()
self.dht_node = MocDHTNode()
utils.call_later = self.clock.callLater
from lbrynet.core.server.DHTHashAnnouncer import DHTHashAnnouncer,DHTHashSupplier
from lbrynet.core.server.DHTHashAnnouncer import DHTHashAnnouncer
self.announcer = DHTHashAnnouncer(self.dht_node, peer_port=3333)
self.supplier = MocSupplier(self.blobs_to_announce)
self.announcer.add_supplier(self.supplier)

def test_basic(self):
self.announcer._announce_available_hashes()
self.assertEqual(self.announcer.hash_queue_size(),self.announcer.CONCURRENT_ANNOUNCERS)
self.assertEqual(self.announcer.hash_queue_size(), self.announcer.CONCURRENT_ANNOUNCERS)
self.clock.advance(1)
self.assertEqual(self.dht_node.blobs_announced, self.num_blobs)
self.assertEqual(self.announcer.hash_queue_size(), 0)
@@ -52,6 +50,6 @@ class DHTHashAnnouncerTest(unittest.TestCase):
self.announcer._announce_available_hashes()
blob_hash = random_lbry_hash()
self.announcer.immediate_announce([blob_hash])
self.assertEqual(self.announcer.hash_queue_size(),self.announcer.CONCURRENT_ANNOUNCERS+1)
self.assertEqual(self.announcer.hash_queue_size(), self.announcer.CONCURRENT_ANNOUNCERS+1)
self.assertEqual(blob_hash, self.announcer.hash_queue[0][0])
@@ -1,6 +1,5 @@
import tempfile
import shutil
import mock
import os
import random
import string
@@ -20,9 +19,9 @@ class BlobManagerTest(unittest.TestCase):
conf.initialize_settings()
self.blob_dir = tempfile.mkdtemp()
self.db_dir = tempfile.mkdtemp()
hash_announcer = DummyHashAnnouncer()
hash_announcer = DummyHashAnnouncer()
self.bm = DiskBlobManager(hash_announcer, self.blob_dir, self.db_dir)
self.peer = Peer('somehost',22)
self.peer = Peer('somehost', 22)

def tearDown(self):
self.bm.stop()
@@ -35,25 +34,25 @@ class BlobManagerTest(unittest.TestCase):
@defer.inlineCallbacks
def _create_and_add_blob(self):
# create and add blob to blob manager
data_len = random.randint(1,1000)
data_len = random.randint(1, 1000)
data = ''.join(random.choice(string.lowercase) for data_len in range(data_len))

hashobj = get_lbry_hash_obj()
hashobj.update(data)
out=hashobj.hexdigest()
blob_hash=out
out = hashobj.hexdigest()
blob_hash = out

# create new blob
yield self.bm.setup()
blob = yield self.bm.get_blob(blob_hash,len(data))
blob = yield self.bm.get_blob(blob_hash, len(data))

writer, finished_d = yield blob.open_for_writing(self.peer)
yield writer.write(data)
yield self.bm.blob_completed(blob)
yield self.bm.add_blob_to_upload_history(blob_hash,'test',len(data))
yield self.bm.add_blob_to_upload_history(blob_hash, 'test', len(data))

# check to see if blob is there
self.assertTrue(os.path.isfile(os.path.join(self.blob_dir,blob_hash)))
self.assertTrue(os.path.isfile(os.path.join(self.blob_dir, blob_hash)))
blobs = yield self.bm.get_all_verified_blobs()
self.assertTrue(blob_hash in blobs)
defer.returnValue(blob_hash)
@@ -63,31 +62,31 @@ class BlobManagerTest(unittest.TestCase):
blob_hashes = []

# create a bunch of blobs
for i in range(0,10):
for i in range(0, 10):
blob_hash = yield self._create_and_add_blob()
blob_hashes.append(blob_hash)
blobs = yield self.bm.get_all_verified_blobs()
self.assertEqual(10,len(blobs))
self.assertEqual(10, len(blobs))

@defer.inlineCallbacks
def test_delete_blob(self):
# create blob
blob_hash = yield self._create_and_add_blob()
blob_hash = yield self._create_and_add_blob()
blobs = yield self.bm.get_all_verified_blobs()
self.assertEqual(len(blobs),1)
self.assertEqual(len(blobs), 1)

# delete blob
# delete blob
yield self.bm.delete_blobs([blob_hash])
self.assertFalse(os.path.isfile(os.path.join(self.blob_dir,blob_hash)))
self.assertFalse(os.path.isfile(os.path.join(self.blob_dir, blob_hash)))
blobs = yield self.bm.get_all_verified_blobs()
self.assertEqual(len(blobs),0)
blobs = yield self.bm._get_all_blob_hashes()
self.assertEqual(len(blobs),0)
self.assertEqual(len(blobs), 0)
blobs = yield self.bm._get_all_blob_hashes()
self.assertEqual(len(blobs), 0)

# delete blob that does not exist, nothing will
# happen
blob_hash= random_lbry_hash()
blob_hash = random_lbry_hash()
out = yield self.bm.delete_blobs([blob_hash])
@@ -96,12 +95,12 @@ class BlobManagerTest(unittest.TestCase):
# Test that a blob that is opened for writing will not be deleted

# create blobs
blob_hashes =[]
for i in range(0,10):
blob_hash = yield self._create_and_add_blob()
blob_hashes = []
for i in range(0, 10):
blob_hash = yield self._create_and_add_blob()
blob_hashes.append(blob_hash)
blobs = yield self.bm.get_all_verified_blobs()
self.assertEqual(len(blobs),10)
self.assertEqual(len(blobs), 10)

# open the last blob
blob = yield self.bm.get_blob(blob_hashes[-1])
@@ -110,7 +109,6 @@ class BlobManagerTest(unittest.TestCase):
# delete the last blob and check if it still exists
out = yield self.bm.delete_blobs([blob_hash])
blobs = yield self.bm.get_all_verified_blobs()
self.assertEqual(len(blobs),10)
self.assertEqual(len(blobs), 10)
self.assertTrue(blob_hashes[-1] in blobs)
self.assertTrue(os.path.isfile(os.path.join(self.blob_dir,blob_hashes[-1])))

self.assertTrue(os.path.isfile(os.path.join(self.blob_dir, blob_hashes[-1])))
@@ -5,8 +5,6 @@ from lbrynet.core.Error import DownloadCanceledError, InvalidDataError
from lbrynet.tests.util import mk_db_and_blob_dir, rm_db_and_blob_dir, random_lbry_hash
from twisted.trial import unittest
from twisted.internet import defer
import os
import time

class BlobFileTest(unittest.TestCase):
@@ -14,7 +12,8 @@ class BlobFileTest(unittest.TestCase):
self.db_dir, self.blob_dir = mk_db_and_blob_dir()
self.fake_content_len = 64
self.fake_content = bytearray('0'*self.fake_content_len)
self.fake_content_hash = '53871b26a08e90cb62142f2a39f0b80de41792322b0ca5602b6eb7b5cf067c49498a7492bb9364bbf90f40c1c5412105'
self.fake_content_hash = ''.join(('53871b26a08e90cb62142f2a39f0b80de41792322b0ca560',
'2b6eb7b5cf067c49498a7492bb9364bbf90f40c1c5412105'))

def tearDown(self):
rm_db_and_blob_dir(self.db_dir, self.blob_dir)
@@ -61,7 +60,7 @@ class BlobFileTest(unittest.TestCase):
@defer.inlineCallbacks
def test_too_much_write(self):
# writing too much data should result in failure
expected_length= 16
expected_length = 16
content = bytearray('0'*32)
blob_hash = random_lbry_hash()
blob_file = BlobFile(self.blob_dir, blob_hash, expected_length)
@@ -73,7 +72,7 @@ class BlobFileTest(unittest.TestCase):
def test_bad_hash(self):
# test a write that should fail because its content's hash
# does not equal the blob_hash
length= 64
length = 64
content = bytearray('0'*length)
blob_hash = random_lbry_hash()
blob_file = BlobFile(self.blob_dir, blob_hash, length)
@@ -5,31 +5,44 @@ import mock
|
|||
from lbrynet.core.PaymentRateManager import NegotiatedPaymentRateManager, BasePaymentRateManager
|
||||
from lbrynet.core.Strategy import BasicAvailabilityWeightedStrategy
|
||||
from lbrynet.core.Offer import Offer
|
||||
from lbrynet.tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker, mock_conf_settings
|
||||
from lbrynet.tests.mocks\
|
||||
import BlobAvailabilityTracker as DummyBlobAvailabilityTracker, mock_conf_settings
|
||||
|
||||
MAX_NEGOTIATION_TURNS = 10
|
||||
random.seed(12345)
|
||||
|
||||
|
||||
def get_random_sample(list_to_sample):
|
||||
result = list_to_sample[random.randint(1, len(list_to_sample)):random.randint(1, len(list_to_sample))]
|
||||
result = list_to_sample[
|
||||
random.randint(1, len(list_to_sample)):random.randint(1, len(list_to_sample))]
|
||||
if not result:
|
||||
return get_random_sample(list_to_sample)
|
||||
return result
|
||||
|
||||
|
||||
def calculate_negotation_turns(client_base, host_base, host_is_generous=True, client_is_generous=True):
|
||||
def calculate_negotation_turns(client_base, host_base, host_is_generous=True,
|
||||
client_is_generous=True):
|
||||
blobs = [
|
||||
'b2e48bb4c88cf46b76adf0d47a72389fae0cd1f19ed27dc509138c99509a25423a4cef788d571dca7988e1dca69e6fa0',
|
||||
'd7c82e6cac093b3f16107d2ae2b2c75424f1fcad2c7fbdbe66e4a13c0b6bd27b67b3a29c403b82279ab0f7c1c48d6787',
|
||||
'5a450b416275da4bdff604ee7b58eaedc7913c5005b7184fc3bc5ef0b1add00613587f54217c91097fc039ed9eace9dd',
|
||||
'f99d24cd50d4bfd77c2598bfbeeb8415bf0feef21200bdf0b8fbbde7751a77b7a2c68e09c25465a2f40fba8eecb0b4e0',
|
||||
'9dbda74a472a2e5861a5d18197aeba0f5de67c67e401124c243d2f0f41edf01d7a26aeb0b5fc9bf47f6361e0f0968e2c',
|
||||
'91dc64cf1ff42e20d627b033ad5e4c3a4a96856ed8a6e3fb4cd5fa1cfba4bf72eefd325f579db92f45f4355550ace8e7',
|
||||
'6d8017aba362e5c5d0046625a039513419810a0397d728318c328a5cc5d96efb589fbca0728e54fe5adbf87e9545ee07',
|
||||
'6af95cd062b4a179576997ef1054c9d2120f8592eea045e9667bea411d520262cd5a47b137eabb7a7871f5f8a79c92dd',
|
||||
'8c70d5e2f5c3a6085006198e5192d157a125d92e7378794472007a61947992768926513fc10924785bdb1761df3c37e6',
|
||||
'c84aa1fd8f5009f7c4e71e444e40d95610abc1480834f835eefb267287aeb10025880a3ce22580db8c6d92efb5bc0c9c'
|
||||
''.join(('b2e48bb4c88cf46b76adf0d47a72389fae0cd1f19ed27dc5',
|
||||
'09138c99509a25423a4cef788d571dca7988e1dca69e6fa0')),
|
||||
''.join(('d7c82e6cac093b3f16107d2ae2b2c75424f1fcad2c7fbdbe',
|
||||
'66e4a13c0b6bd27b67b3a29c403b82279ab0f7c1c48d6787')),
|
||||
''.join(('5a450b416275da4bdff604ee7b58eaedc7913c5005b7184f',
|
||||
'c3bc5ef0b1add00613587f54217c91097fc039ed9eace9dd')),
|
||||
''.join(('f99d24cd50d4bfd77c2598bfbeeb8415bf0feef21200bdf0',
|
||||
'b8fbbde7751a77b7a2c68e09c25465a2f40fba8eecb0b4e0')),
|
||||
''.join(('9dbda74a472a2e5861a5d18197aeba0f5de67c67e401124c',
|
||||
'243d2f0f41edf01d7a26aeb0b5fc9bf47f6361e0f0968e2c')),
|
||||
''.join(('91dc64cf1ff42e20d627b033ad5e4c3a4a96856ed8a6e3fb',
|
||||
'4cd5fa1cfba4bf72eefd325f579db92f45f4355550ace8e7')),
|
||||
''.join(('6d8017aba362e5c5d0046625a039513419810a0397d72831',
|
||||
'8c328a5cc5d96efb589fbca0728e54fe5adbf87e9545ee07')),
|
||||
''.join(('6af95cd062b4a179576997ef1054c9d2120f8592eea045e9',
|
||||
'667bea411d520262cd5a47b137eabb7a7871f5f8a79c92dd')),
|
||||
''.join(('8c70d5e2f5c3a6085006198e5192d157a125d92e73787944',
|
||||
'72007a61947992768926513fc10924785bdb1761df3c37e6')),
|
||||
''.join(('c84aa1fd8f5009f7c4e71e444e40d95610abc1480834f835',
|
||||
'eefb267287aeb10025880a3ce22580db8c6d92efb5bc0c9c'))
|
||||
]
|
||||
|
||||
host = mock.Mock()
|
||||
|
|
|
@@ -20,7 +20,10 @@ test_metadata = {
|
|||
test_claim_dict = {
|
||||
'version':'_0_0_1',
|
||||
'claimType':'streamType',
|
||||
'stream':{'metadata':test_metadata, 'version':'_0_0_1','source':{'source':'8655f713819344980a9a0d67b198344e2c462c90f813e86f0c63789ab0868031f25c54d0bb31af6658e997e2041806eb','sourceType':'lbry_sd_hash','contentType':'video/mp4','version':'_0_0_1'},
|
||||
'stream':{'metadata':test_metadata, 'version':'_0_0_1', 'source':
|
||||
{'source': ''.join(('8655f713819344980a9a0d67b198344e2c462c90f813e86f',
|
||||
'0c63789ab0868031f25c54d0bb31af6658e997e2041806eb')),
|
||||
'sourceType': 'lbry_sd_hash', 'contentType': 'video/mp4', 'version': '_0_0_1'},
|
||||
}}
|
||||
|
||||
|
||||
|
@@ -84,7 +87,7 @@ class WalletTest(unittest.TestCase):
|
|||
MocLbryumWallet._support_claim = failed_support_claim
|
||||
wallet = MocLbryumWallet()
|
||||
d = wallet.support_claim('test', "f43dc06256a69988bdbea09a58c80493ba15dcfa", 1)
|
||||
self.assertFailure(d,Exception)
|
||||
self.assertFailure(d, Exception)
|
||||
return d
|
||||
|
||||
def test_succesful_support(self):
|
||||
|
@@ -150,20 +153,20 @@ class WalletTest(unittest.TestCase):
|
|||
d = wallet.update_balance()
|
||||
# test point reservation
|
||||
d.addCallback(lambda _: self.assertEqual(5, wallet.get_balance()))
|
||||
d.addCallback(lambda _: wallet.reserve_points('testid',2))
|
||||
d.addCallback(lambda _: wallet.reserve_points('testid', 2))
|
||||
d.addCallback(lambda _: self.assertEqual(3, wallet.get_balance()))
|
||||
d.addCallback(lambda _: self.assertEqual(2, wallet.total_reserved_points))
|
||||
# test reserved points cancellation
|
||||
d.addCallback(lambda _: wallet.cancel_point_reservation(ReservedPoints('testid',2)))
|
||||
d.addCallback(lambda _: wallet.cancel_point_reservation(ReservedPoints('testid', 2)))
|
||||
d.addCallback(lambda _: self.assertEqual(5, wallet.get_balance()))
|
||||
d.addCallback(lambda _: self.assertEqual(0, wallet.total_reserved_points))
|
||||
# test point sending
|
||||
d.addCallback(lambda _: wallet.reserve_points('testid',2))
|
||||
d.addCallback(lambda reserve_points: wallet.send_points_to_address(reserve_points,1))
|
||||
d.addCallback(lambda _: wallet.reserve_points('testid', 2))
|
||||
d.addCallback(lambda reserve_points: wallet.send_points_to_address(reserve_points, 1))
|
||||
d.addCallback(lambda _: self.assertEqual(3, wallet.get_balance()))
|
||||
# test failed point reservation
|
||||
d.addCallback(lambda _: wallet.reserve_points('testid',4))
|
||||
d.addCallback(lambda out: self.assertEqual(None,out))
|
||||
d.addCallback(lambda _: wallet.reserve_points('testid', 4))
|
||||
d.addCallback(lambda out: self.assertEqual(None, out))
|
||||
return d
|
||||
|
||||
def test_point_reservation_and_claim(self):
|
||||
|
@@ -174,9 +177,9 @@ class WalletTest(unittest.TestCase):
|
|||
wallet._update_balance = update_balance
|
||||
d = wallet.update_balance()
|
||||
d.addCallback(lambda _: self.assertEqual(5, wallet.get_balance()))
|
||||
d.addCallback(lambda _: wallet.reserve_points('testid',2))
|
||||
d.addCallback(lambda _: wallet.reserve_points('testid', 2))
|
||||
d.addCallback(lambda _: wallet.claim_name('test', 4, test_claim_dict))
|
||||
self.assertFailure(d,InsufficientFundsError)
|
||||
self.assertFailure(d, InsufficientFundsError)
|
||||
return d
|
||||
|
||||
def test_point_reservation_and_support(self):
|
||||
|
@@ -187,7 +190,8 @@ class WalletTest(unittest.TestCase):
|
|||
wallet._update_balance = update_balance
|
||||
d = wallet.update_balance()
|
||||
d.addCallback(lambda _: self.assertEqual(5, wallet.get_balance()))
|
||||
d.addCallback(lambda _: wallet.reserve_points('testid',2))
|
||||
d.addCallback(lambda _: wallet.support_claim('test', "f43dc06256a69988bdbea09a58c80493ba15dcfa", 4))
|
||||
self.assertFailure(d,InsufficientFundsError)
|
||||
d.addCallback(lambda _: wallet.reserve_points('testid', 2))
|
||||
d.addCallback(lambda _: wallet.support_claim(
|
||||
'test', "f43dc06256a69988bdbea09a58c80493ba15dcfa", 4))
|
||||
self.assertFailure(d, InsufficientFundsError)
|
||||
return d
|
||||
|
|
|
@@ -7,11 +7,8 @@ from lbrynet.tests.mocks import mock_conf_settings
|
|||
|
||||
from Crypto import Random
|
||||
from Crypto.Cipher import AES
|
||||
import tempfile
|
||||
import random
|
||||
import string
|
||||
import StringIO
|
||||
import time
|
||||
|
||||
class MocBlob(object):
|
||||
def __init__(self):
|
||||
|
@@ -30,7 +27,7 @@ class MocBlob(object):
|
|||
|
||||
|
||||
def random_string(length):
|
||||
return ''.join(random.choice(string.lowercase) for i in range(length))
|
||||
return ''.join(random.choice(string.lowercase) for i in range(length))
|
||||
|
||||
|
||||
class TestCryptBlob(unittest.TestCase):
|
||||
|
@@ -50,9 +47,9 @@ class TestCryptBlob(unittest.TestCase):
|
|||
string_to_encrypt = random_string(size_of_data)
|
||||
|
||||
# encrypt string
|
||||
done,num_bytes = maker.write(string_to_encrypt)
|
||||
done, num_bytes = maker.write(string_to_encrypt)
|
||||
yield maker.close()
|
||||
self.assertEqual(size_of_data,num_bytes)
|
||||
self.assertEqual(size_of_data, num_bytes)
|
||||
expected_encrypted_blob_size = ((size_of_data / AES.block_size) + 1) * AES.block_size
|
||||
self.assertEqual(expected_encrypted_blob_size, len(blob.data))
|
||||
|
||||
|
@@ -68,7 +65,7 @@ class TestCryptBlob(unittest.TestCase):
|
|||
# decrypt string
|
||||
decryptor = CryptBlob.StreamBlobDecryptor(blob, key, iv, size_of_data)
|
||||
decryptor.decrypt(write_func)
|
||||
self.assertEqual(self.data_buf,string_to_encrypt)
|
||||
self.assertEqual(self.data_buf, string_to_encrypt)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def test_encrypt_decrypt(self):
|
||||
|
|
|
@@ -36,7 +36,7 @@ class ContactOperatorsTest(unittest.TestCase):
|
|||
def testIllogicalComparisons(self):
|
||||
""" Test comparisons with non-Contact and non-str types """
|
||||
msg = '"{}" operator: Contact object should not be equal to {} type'
|
||||
for item in (123, [1,2,3], {'key': 'value'}):
|
||||
for item in (123, [1, 2, 3], {'key': 'value'}):
|
||||
self.failIfEqual(
|
||||
self.firstContact, item,
|
||||
msg.format('eq', type(item).__name__))
|
||||
|
|
|
@@ -6,8 +6,6 @@
|
|||
|
||||
import unittest
|
||||
import time
|
||||
import datetime
|
||||
import random
|
||||
|
||||
import lbrynet.dht.datastore
|
||||
import lbrynet.dht.constants
|
||||
|
@@ -35,7 +33,7 @@ class DictDataStoreTest(unittest.TestCase):
|
|||
(hashKey2, 'test4'),
|
||||
(hashKey3, 'test5'),
|
||||
(hashKey3, 'test6'))
|
||||
|
||||
|
||||
def testReadWrite(self):
|
||||
# Test write ability
|
||||
for key, value in self.cases:
|
||||
|
@@ -44,23 +42,32 @@ class DictDataStoreTest(unittest.TestCase):
|
|||
self.ds.addPeerToBlob(key, value, now, now, 'node1')
|
||||
except Exception:
|
||||
import traceback
|
||||
self.fail('Failed writing the following data: key: "%s", data: "%s"\n The error was: %s:' % (key, value, traceback.format_exc(5)))
|
||||
|
||||
self.fail(''.join(('Failed writing the following data: key: "%s", ',
|
||||
'data: "%s"\n The error was: %s:')) %
|
||||
(key, value, traceback.format_exc(5)))
|
||||
|
||||
# Verify writing (test query ability)
|
||||
for key, value in self.cases:
|
||||
try:
|
||||
self.failUnless(self.ds.hasPeersForBlob(key), 'Key "%s" not found in DataStore! DataStore key dump: %s' % (key, self.ds.keys()))
|
||||
self.failUnless(self.ds.hasPeersForBlob(key),
|
||||
'Key "%s" not found in DataStore! DataStore key dump: %s' %
|
||||
(key, self.ds.keys()))
|
||||
except Exception:
|
||||
import traceback
|
||||
self.fail('Failed verifying that the following key exists: "%s"\n The error was: %s:' % (key, traceback.format_exc(5)))
|
||||
|
||||
self.fail(
|
||||
'Failed verifying that the following key exists: "%s"\n The error was: %s:' %
|
||||
(key, traceback.format_exc(5)))
|
||||
|
||||
# Read back the data
|
||||
for key, value in self.cases:
|
||||
self.failUnless(value in self.ds.getPeersForBlob(key), 'DataStore returned invalid data! Expected "%s", got "%s"' % (value, self.ds.getPeersForBlob(key)))
|
||||
|
||||
self.failUnless(value in self.ds.getPeersForBlob(key),
|
||||
'DataStore returned invalid data! Expected "%s", got "%s"' %
|
||||
(value, self.ds.getPeersForBlob(key)))
|
||||
|
||||
def testNonExistentKeys(self):
|
||||
for key, value in self.cases:
|
||||
self.failIf(key in self.ds.keys(), 'DataStore reports it has non-existent key: "%s"' % key)
|
||||
self.failIf(key in self.ds.keys(), 'DataStore reports it has non-existent key: "%s"' %
|
||||
key)
|
||||
|
||||
def testExpires(self):
|
||||
now = int(time.time())
|
||||
|
@@ -78,11 +85,25 @@ class DictDataStoreTest(unittest.TestCase):
|
|||
self.ds.addPeerToBlob(h2, 'val3', now - td2, now - td2, '3')
|
||||
self.ds.addPeerToBlob(h2, 'val4', now, now, '4')
|
||||
self.ds.removeExpiredPeers()
|
||||
self.failUnless('val1' in self.ds.getPeersForBlob(h1), 'DataStore deleted an unexpired value! Value %s, publish time %s, current time %s' % ('val1', str(now - td), str(now)))
|
||||
self.failIf('val2' in self.ds.getPeersForBlob(h1), 'DataStore failed to delete an expired value! Value %s, publish time %s, current time %s' % ('val2', str(now - td2), str(now)))
|
||||
self.failIf('val3' in self.ds.getPeersForBlob(h2), 'DataStore failed to delete an expired value! Value %s, publish time %s, current time %s' % ('val3', str(now - td2), str(now)))
|
||||
self.failUnless('val4' in self.ds.getPeersForBlob(h2), 'DataStore deleted an unexpired value! Value %s, publish time %s, current time %s' % ('val4', str(now), str(now)))
|
||||
|
||||
self.failUnless(
|
||||
'val1' in self.ds.getPeersForBlob(h1),
|
||||
'DataStore deleted an unexpired value! Value %s, publish time %s, current time %s' %
|
||||
('val1', str(now - td), str(now)))
|
||||
self.failIf(
|
||||
'val2' in self.ds.getPeersForBlob(h1),
|
||||
''.join(('DataStore failed to delete an expired value! ',
|
||||
'Value %s, publish time %s, current time %s')) %
|
||||
('val2', str(now - td2), str(now)))
|
||||
self.failIf(
|
||||
'val3' in self.ds.getPeersForBlob(h2),
|
||||
''.join(('DataStore failed to delete an expired value! ',
|
||||
'Value %s, publish time %s, current time %s')) %
|
||||
('val3', str(now - td2), str(now)))
|
||||
self.failUnless(
|
||||
'val4' in self.ds.getPeersForBlob(h2),
|
||||
'DataStore deleted an unexpired value! Value %s, publish time %s, current time %s' %
|
||||
('val4', str(now), str(now)))
|
||||
|
||||
# # First write with fake values
|
||||
# for key, value in self.cases:
|
||||
# except Exception:
|
||||
|
|
|
@@ -15,27 +15,36 @@ class BencodeTest(unittest.TestCase):
|
|||
# Thanks goes to wikipedia for the initial test cases ;-)
|
||||
self.cases = ((42, 'i42e'),
|
||||
('spam', '4:spam'),
|
||||
(['spam',42], 'l4:spami42ee'),
|
||||
(['spam', 42], 'l4:spami42ee'),
|
||||
({'foo':42, 'bar':'spam'}, 'd3:bar4:spam3:fooi42ee'),
|
||||
# ...and now the "real life" tests
|
||||
([['abc', '127.0.0.1', 1919], ['def', '127.0.0.1', 1921]], 'll3:abc9:127.0.0.1i1919eel3:def9:127.0.0.1i1921eee'))
|
||||
# The following test cases are "bad"; i.e. sending rubbish into the decoder to test what exceptions get thrown
|
||||
([['abc', '127.0.0.1', 1919], ['def', '127.0.0.1', 1921]],
|
||||
'll3:abc9:127.0.0.1i1919eel3:def9:127.0.0.1i1921eee'))
|
||||
# The following test cases are "bad"; i.e. sending rubbish into the decoder to test
|
||||
# what exceptions get thrown
|
||||
self.badDecoderCases = ('abcdefghijklmnopqrstuvwxyz',
|
||||
'')
|
||||
|
||||
'')
|
||||
|
||||
def testEncoder(self):
|
||||
""" Tests the bencode encoder """
|
||||
for value, encodedValue in self.cases:
|
||||
result = self.encoding.encode(value)
|
||||
self.failUnlessEqual(result, encodedValue, 'Value "%s" not correctly encoded! Expected "%s", got "%s"' % (value, encodedValue, result))
|
||||
|
||||
self.failUnlessEqual(
|
||||
result, encodedValue,
|
||||
'Value "%s" not correctly encoded! Expected "%s", got "%s"' %
|
||||
(value, encodedValue, result))
|
||||
|
||||
def testDecoder(self):
|
||||
""" Tests the bencode decoder """
|
||||
for value, encodedValue in self.cases:
|
||||
result = self.encoding.decode(encodedValue)
|
||||
self.failUnlessEqual(result, value, 'Value "%s" not correctly decoded! Expected "%s", got "%s"' % (encodedValue, value, result))
|
||||
self.failUnlessEqual(
|
||||
result, value,
|
||||
'Value "%s" not correctly decoded! Expected "%s", got "%s"' %
|
||||
(encodedValue, value, result))
|
||||
for encodedValue in self.badDecoderCases:
|
||||
self.failUnlessRaises(lbrynet.dht.encoding.DecodeError, self.encoding.decode, encodedValue)
|
||||
self.failUnlessRaises(
|
||||
lbrynet.dht.encoding.DecodeError, self.encoding.decode, encodedValue)
|
||||
|
||||
def suite():
|
||||
suite = unittest.TestSuite()
|
||||
|
|
|
@@ -22,65 +22,76 @@ class KBucketTest(unittest.TestCase):
|
|||
for i in range(constants.k):
|
||||
tmpContact = contact.Contact('tempContactID%d' % i, str(i), i, i)
|
||||
self.kbucket.addContact(tmpContact)
|
||||
self.failUnlessEqual(self.kbucket._contacts[i], tmpContact, "Contact in position %d not the same as the newly-added contact" % i)
|
||||
self.failUnlessEqual(
|
||||
self.kbucket._contacts[i],
|
||||
tmpContact,
|
||||
"Contact in position %d not the same as the newly-added contact" % i)
|
||||
|
||||
# Test if contact is not added to full list
|
||||
i += 1
|
||||
tmpContact = contact.Contact('tempContactID%d' % i, str(i), i, i)
|
||||
self.failUnlessRaises(kbucket.BucketFull, self.kbucket.addContact, tmpContact)
|
||||
|
||||
|
||||
# Test if an existing contact is updated correctly if added again
|
||||
existingContact = self.kbucket._contacts[0]
|
||||
self.kbucket.addContact(existingContact)
|
||||
self.failUnlessEqual(self.kbucket._contacts.index(existingContact), len(self.kbucket._contacts)-1, 'Contact not correctly updated; it should be at the end of the list of contacts')
|
||||
self.failUnlessEqual(
|
||||
self.kbucket._contacts.index(existingContact),
|
||||
len(self.kbucket._contacts)-1,
|
||||
'Contact not correctly updated; it should be at the end of the list of contacts')
|
||||
|
||||
def testGetContacts(self):
|
||||
# try and get 2 contacts from empty list
|
||||
result = self.kbucket.getContacts(2)
|
||||
self.failIf(len(result) != 0, "Returned list should be empty; returned list length: %d" % (len(result)))
|
||||
self.failIf(len(result) != 0, "Returned list should be empty; returned list length: %d" %
|
||||
(len(result)))
|
||||
|
||||
|
||||
# Add k-2 contacts
|
||||
if constants.k >= 2:
|
||||
for i in range(constants.k-2):
|
||||
tmpContact = contact.Contact(i,i,i,i)
|
||||
tmpContact = contact.Contact(i, i, i, i)
|
||||
self.kbucket.addContact(tmpContact)
|
||||
else:
|
||||
# add k contacts
|
||||
for i in range(constants.k):
|
||||
tmpContact = contact.Contact(i,i,i,i)
|
||||
tmpContact = contact.Contact(i, i, i, i)
|
||||
self.kbucket.addContact(tmpContact)
|
||||
|
||||
# try to get too many contacts
|
||||
# requested count greater than bucket size; should return at most k contacts
|
||||
contacts = self.kbucket.getContacts(constants.k+3)
|
||||
self.failUnless(len(contacts) <= constants.k, 'Returned list should not have more than k entries!')
|
||||
self.failUnless(len(contacts) <= constants.k,
|
||||
'Returned list should not have more than k entries!')
|
||||
|
||||
# verify returned contacts in list
|
||||
for i in range(constants.k-2):
|
||||
self.failIf(self.kbucket._contacts[i].id != i, "Contact in position %s not same as added contact" % (str(i)))
|
||||
|
||||
self.failIf(self.kbucket._contacts[i].id != i,
|
||||
"Contact in position %s not same as added contact" % (str(i)))
|
||||
|
||||
# try to get too many contacts
|
||||
# requested count one greater than number of contacts
|
||||
if constants.k >= 2:
|
||||
result = self.kbucket.getContacts(constants.k-1)
|
||||
self.failIf(len(result) != constants.k-2, "Too many contacts in returned list %s - should be %s" % (len(result), constants.k-2))
|
||||
self.failIf(len(result) != constants.k-2,
|
||||
"Too many contacts in returned list %s - should be %s" %
|
||||
(len(result), constants.k-2))
|
||||
else:
|
||||
result = self.kbucket.getContacts(constants.k-1)
|
||||
# if the count is <= 0, it should return all of it's contats
|
||||
self.failIf(len(result) != constants.k, "Too many contacts in returned list %s - should be %s" % (len(result), constants.k-2))
|
||||
|
||||
# try to get contacts
|
||||
# requested count less than contact number
|
||||
if constants.k >= 3:
|
||||
self.failIf(len(result) != constants.k,
|
||||
"Too many contacts in returned list %s - should be %s" %
|
||||
(len(result), constants.k-2))
|
||||
result = self.kbucket.getContacts(constants.k-3)
|
||||
self.failIf(len(result) != constants.k-3, "Too many contacts in returned list %s - should be %s" % (len(result), constants.k-3))
|
||||
self.failIf(len(result) != constants.k-3,
|
||||
"Too many contacts in returned list %s - should be %s" %
|
||||
(len(result), constants.k-3))
|
||||
|
||||
def testRemoveContact(self):
|
||||
# try remove contact from empty list
|
||||
rmContact = contact.Contact('TestContactID1','127.0.0.1',1, 1)
|
||||
rmContact = contact.Contact('TestContactID1', '127.0.0.1', 1, 1)
|
||||
self.failUnlessRaises(ValueError, self.kbucket.removeContact, rmContact)
|
||||
|
||||
|
||||
# Add couple contacts
|
||||
for i in range(constants.k-2):
|
||||
tmpContact = contact.Contact('tmpTestContactID%d' % i, str(i), i, i)
|
||||
|
|
|
@@ -6,56 +6,81 @@
|
|||
|
||||
import unittest
|
||||
|
||||
from lbrynet.dht.msgtypes import Message, RequestMessage, ResponseMessage, ErrorMessage
|
||||
from lbrynet.dht.msgtypes import RequestMessage, ResponseMessage, ErrorMessage
|
||||
from lbrynet.dht.msgformat import MessageTranslator, DefaultFormat
|
||||
|
||||
class DefaultFormatTranslatorTest(unittest.TestCase):
|
||||
""" Test case for the default message translator """
|
||||
def setUp(self):
|
||||
self.cases = ((RequestMessage('node1', 'rpcMethod', {'arg1': 'a string', 'arg2': 123}, 'rpc1'),
|
||||
self.cases = ((RequestMessage('node1', 'rpcMethod',
|
||||
{'arg1': 'a string', 'arg2': 123}, 'rpc1'),
|
||||
{DefaultFormat.headerType: DefaultFormat.typeRequest,
|
||||
DefaultFormat.headerNodeID: 'node1',
|
||||
DefaultFormat.headerMsgID: 'rpc1',
|
||||
DefaultFormat.headerPayload: 'rpcMethod',
|
||||
DefaultFormat.headerArgs: {'arg1': 'a string', 'arg2': 123}}),
|
||||
|
||||
|
||||
(ResponseMessage('rpc2', 'node2', 'response'),
|
||||
{DefaultFormat.headerType: DefaultFormat.typeResponse,
|
||||
DefaultFormat.headerNodeID: 'node2',
|
||||
DefaultFormat.headerMsgID: 'rpc2',
|
||||
DefaultFormat.headerPayload: 'response'}),
|
||||
|
||||
(ErrorMessage('rpc3', 'node3', "<type 'exceptions.ValueError'>", 'this is a test exception'),
|
||||
|
||||
(ErrorMessage('rpc3', 'node3',
|
||||
"<type 'exceptions.ValueError'>", 'this is a test exception'),
|
||||
{DefaultFormat.headerType: DefaultFormat.typeError,
|
||||
DefaultFormat.headerNodeID: 'node3',
|
||||
DefaultFormat.headerMsgID: 'rpc3',
|
||||
DefaultFormat.headerPayload: "<type 'exceptions.ValueError'>",
|
||||
DefaultFormat.headerArgs: 'this is a test exception'}),
|
||||
|
||||
(ResponseMessage('rpc4', 'node4', [('H\x89\xb0\xf4\xc9\xe6\xc5`H>\xd5\xc2\xc5\xe8Od\xf1\xca\xfa\x82', '127.0.0.1', 1919), ('\xae\x9ey\x93\xdd\xeb\xf1^\xff\xc5\x0f\xf8\xac!\x0e\x03\x9fY@{', '127.0.0.1', 1921)]),
|
||||
|
||||
(ResponseMessage(
|
||||
'rpc4', 'node4',
|
||||
[('H\x89\xb0\xf4\xc9\xe6\xc5`H>\xd5\xc2\xc5\xe8Od\xf1\xca\xfa\x82',
|
||||
'127.0.0.1', 1919),
|
||||
('\xae\x9ey\x93\xdd\xeb\xf1^\xff\xc5\x0f\xf8\xac!\x0e\x03\x9fY@{',
|
||||
'127.0.0.1', 1921)]),
|
||||
{DefaultFormat.headerType: DefaultFormat.typeResponse,
|
||||
DefaultFormat.headerNodeID: 'node4',
|
||||
DefaultFormat.headerMsgID: 'rpc4',
|
||||
DefaultFormat.headerPayload: [('H\x89\xb0\xf4\xc9\xe6\xc5`H>\xd5\xc2\xc5\xe8Od\xf1\xca\xfa\x82', '127.0.0.1', 1919), ('\xae\x9ey\x93\xdd\xeb\xf1^\xff\xc5\x0f\xf8\xac!\x0e\x03\x9fY@{', '127.0.0.1', 1921)]})
|
||||
DefaultFormat.headerPayload:
|
||||
[('H\x89\xb0\xf4\xc9\xe6\xc5`H>\xd5\xc2\xc5\xe8Od\xf1\xca\xfa\x82',
|
||||
'127.0.0.1', 1919),
|
||||
('\xae\x9ey\x93\xdd\xeb\xf1^\xff\xc5\x0f\xf8\xac!\x0e\x03\x9fY@{',
|
||||
'127.0.0.1', 1921)]})
|
||||
)
|
||||
self.translator = DefaultFormat()
|
||||
self.failUnless(isinstance(self.translator, MessageTranslator), 'Translator class must inherit from entangled.kademlia.msgformat.MessageTranslator!')
|
||||
self.failUnless(
|
||||
isinstance(self.translator, MessageTranslator),
|
||||
'Translator class must inherit from entangled.kademlia.msgformat.MessageTranslator!')
|
||||
|
||||
def testToPrimitive(self):
|
||||
""" Tests translation from a Message object to a primitive """
|
||||
for msg, msgPrimitive in self.cases:
|
||||
translatedObj = self.translator.toPrimitive(msg)
|
||||
self.failUnlessEqual(len(translatedObj), len(msgPrimitive), "Translated object does not match example object's size")
|
||||
self.failUnlessEqual(len(translatedObj), len(msgPrimitive),
|
||||
"Translated object does not match example object's size")
|
||||
for key in msgPrimitive:
|
||||
self.failUnlessEqual(translatedObj[key], msgPrimitive[key], 'Message object type %s not translated correctly into primitive on key "%s"; expected "%s", got "%s"' % (msg.__class__.__name__, key, msgPrimitive[key], translatedObj[key]))
|
||||
|
||||
self.failUnlessEqual(
|
||||
translatedObj[key], msgPrimitive[key],
|
||||
''.join(('Message object type %s not translated correctly into primitive on ',
|
||||
'key "%s"; expected "%s", got "%s"')) %
|
||||
(msg.__class__.__name__, key, msgPrimitive[key], translatedObj[key]))
|
||||
|
||||
def testFromPrimitive(self):
|
||||
""" Tests translation from a primitive to a Message object """
|
||||
for msg, msgPrimitive in self.cases:
|
||||
translatedObj = self.translator.fromPrimitive(msgPrimitive)
|
||||
self.failUnlessEqual(type(translatedObj), type(msg), 'Message type incorrectly translated; expected "%s", got "%s"' % (type(msg), type(translatedObj)))
|
||||
self.failUnlessEqual(
|
||||
type(translatedObj), type(msg),
|
||||
'Message type incorrectly translated; expected "%s", got "%s"' %
|
||||
(type(msg), type(translatedObj)))
|
||||
for key in msg.__dict__:
|
||||
self.failUnlessEqual(msg.__dict__[key], translatedObj.__dict__[key], 'Message instance variable "%s" not translated correctly; expected "%s", got "%s"' % (key, msg.__dict__[key], translatedObj.__dict__[key]))
|
||||
self.failUnlessEqual(
|
||||
msg.__dict__[key], translatedObj.__dict__[key],
|
||||
''.join(('Message instance variable "%s" not translated correctly; ',
|
||||
'expected "%s", got "%s"')) %
|
||||
(key, msg.__dict__[key], translatedObj.__dict__[key]))
|
||||
|
||||
|
||||
def suite():
|
||||
|
|
|
@@ -22,8 +22,9 @@ class TestEncryptedFileSaver(unittest.TestCase):
|
|||
wallet = None
|
||||
download_directory = '.'
|
||||
upload_allowed = False
|
||||
saver = EncryptedFileSaver(stream_hash, peer_finder, rate_limiter, blob_manager, stream_info_manager,
|
||||
payment_rate_manager, wallet, download_directory, file_name)
|
||||
saver = EncryptedFileSaver(
|
||||
stream_hash, peer_finder, rate_limiter, blob_manager, stream_info_manager,
|
||||
payment_rate_manager, wallet, download_directory, file_name)
|
||||
|
||||
yield saver._setup_output()
|
||||
self.assertTrue(os.path.isfile(file_name))
|
||||
|
|
|
@@ -3,7 +3,6 @@ import shutil
|
|||
from twisted.trial import unittest
|
||||
from twisted.internet import defer
|
||||
from lbrynet.lbry_file.EncryptedFileMetadataManager import DBEncryptedFileMetadataManager
|
||||
from lbrynet.core import utils
|
||||
from lbrynet.cryptstream.CryptBlob import CryptBlobInfo
|
||||
from lbrynet.core.Error import NoSuchStreamHash
|
||||
from lbrynet.tests.util import random_lbry_hash
|
||||
|
@@ -21,15 +20,15 @@ class DBEncryptedFileMetadataManagerTest(unittest.TestCase):
|
|||
def test_basic(self):
|
||||
yield self.manager.setup()
|
||||
out = yield self.manager.get_all_streams()
|
||||
self.assertEqual(len(out),0)
|
||||
self.assertEqual(len(out), 0)
|
||||
|
||||
stream_hash = random_lbry_hash()
|
||||
stream_hash = random_lbry_hash()
|
||||
file_name = 'file_name'
|
||||
key = 'key'
|
||||
suggested_file_name = 'sug_file_name'
|
||||
blob1 = CryptBlobInfo(random_lbry_hash(),0,10,1)
|
||||
blob2 = CryptBlobInfo(random_lbry_hash(),0,10,1)
|
||||
blobs=[blob1,blob2]
|
||||
blob1 = CryptBlobInfo(random_lbry_hash(), 0, 10, 1)
|
||||
blob2 = CryptBlobInfo(random_lbry_hash(), 0, 10, 1)
|
||||
blobs = [blob1, blob2]
|
||||
|
||||
# save stream
|
||||
yield self.manager.save_stream(stream_hash, file_name, key, suggested_file_name, blobs)
|
||||
|
@@ -49,9 +48,9 @@ class DBEncryptedFileMetadataManagerTest(unittest.TestCase):
|
|||
self.assertEqual(1, len(out))
|
||||
|
||||
# add a blob to stream
|
||||
blob3 = CryptBlobInfo(random_lbry_hash(),0,10,1)
|
||||
blob3 = CryptBlobInfo(random_lbry_hash(), 0, 10, 1)
|
||||
blobs = [blob3]
|
||||
out = yield self.manager.add_blobs_to_stream(stream_hash,blobs)
|
||||
out = yield self.manager.add_blobs_to_stream(stream_hash, blobs)
|
||||
out = yield self.manager.get_blobs_for_stream(stream_hash)
|
||||
self.assertEqual(3, len(out))
|
||||
|
||||
|
@@ -67,7 +66,7 @@ class DBEncryptedFileMetadataManagerTest(unittest.TestCase):
|
|||
yield self.manager.save_sd_blob_hash_to_stream(stream_hash, sd_blob_hash)
|
||||
out = yield self.manager.get_sd_blob_hashes_for_stream(stream_hash)
|
||||
self.assertEqual(1, len(out))
|
||||
self.assertEqual(sd_blob_hash,out[0])
|
||||
self.assertEqual(sd_blob_hash, out[0])
|
||||
|
||||
out = yield self.manager.get_stream_hash_for_sd_hash(sd_blob_hash)
|
||||
self.assertEqual(stream_hash, out)
|
||||
|
@@ -76,6 +75,3 @@ class DBEncryptedFileMetadataManagerTest(unittest.TestCase):
|
|||
yield self.manager.delete_stream(stream_hash)
|
||||
out = yield self.manager.check_if_stream_exists(stream_hash)
|
||||
self.assertFalse(out)
|
||||
|
||||
|
||||
|
||||
|
|
|
@@ -1,7 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
from Crypto.Cipher import AES
|
||||
import mock
|
||||
from twisted.trial import unittest
|
||||
|
@@ -37,7 +34,8 @@ class CreateEncryptedFileTest(unittest.TestCase):
|
|||
def create_file(self, filename):
|
||||
session = mock.Mock(spec=Session.Session)(None, None)
|
||||
hash_announcer = DHTHashAnnouncer.DHTHashAnnouncer(None, None)
|
||||
self.blob_manager = BlobManager.DiskBlobManager(hash_announcer, self.tmp_blob_dir, self.tmp_db_dir)
|
||||
self.blob_manager = BlobManager.DiskBlobManager(
|
||||
hash_announcer, self.tmp_blob_dir, self.tmp_db_dir)
|
||||
session.blob_manager = self.blob_manager
|
||||
yield session.blob_manager.setup()
|
||||
session.db_dir = self.tmp_db_dir
|
||||
|
|
|
@@ -19,13 +19,14 @@ class TestEncryptedFileManager(unittest.TestCase):
|
|||
|
||||
session = MocSession()
|
||||
session.db_dir = '.'
|
||||
stream_info_manager = None
|
||||
stream_info_manager = None
|
||||
sd_identifier = None
|
||||
download_directory = '.'
|
||||
manager = EncryptedFileManager(session, stream_info_manager, sd_identifier, download_directory)
|
||||
manager = EncryptedFileManager(
|
||||
session, stream_info_manager, sd_identifier, download_directory)
|
||||
yield manager._open_db()
|
||||
out = yield manager._get_all_lbry_files()
|
||||
self.assertEqual(len(out),0)
|
||||
self.assertEqual(len(out), 0)
|
||||
|
||||
stream_hash = random_lbry_hash()
|
||||
blob_data_rate = 0
|
||||
|
|
|
@@ -8,7 +8,6 @@ from lbryschema.decode import smart_decode
from lbrynet import conf
from lbrynet.core import Session, PaymentRateManager, Wallet
from lbrynet.daemon.Daemon import Daemon as LBRYDaemon
from lbrynet.daemon import ExchangeRateManager

from lbrynet.tests import util
from lbrynet.tests.mocks import mock_conf_settings, FakeNetwork

@@ -43,7 +42,8 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False):
"license_url": "fake license url",
"nsfw": False,
"sources": {
"lbry_sd_hash": "d2b8b6e907dde95245fe6d144d16c2fdd60c4e0c6463ec98b85642d06d8e9414e8fcfdcb7cb13532ec5454fb8fe7f280"
"lbry_sd_hash": ''.join(('d2b8b6e907dde95245fe6d144d16c2fdd60c4e0c6463ec98',
'b85642d06d8e9414e8fcfdcb7cb13532ec5454fb8fe7f280'))
},
"thumbnail": "fake thumbnail",
"title": "fake title",

@@ -54,7 +54,8 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False):
{"fee": {"USD": {"address": "bQ6BGboPV2SpTMEP7wLNiAcnsZiH8ye6eA", "amount": 0.75}}})
daemon._resolve_name = lambda _: defer.succeed(metadata)
migrated = smart_decode(json.dumps(metadata))
daemon.session.wallet.resolve = lambda *_: defer.succeed({"test": {'claim': {'value': migrated.claim_dict}}})
daemon.session.wallet.resolve = lambda *_: defer.succeed(
{"test": {'claim': {'value': migrated.claim_dict}}})
return daemon

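The long sd_hash literal in this file is shortened with ''.join((...)). Adjacent string literals inside parentheses are an equivalent way to stay under the line limit; the sketch below, with the hash value copied from the hunk above, shows both forms yield the same 96-character string:

joined = ''.join(('d2b8b6e907dde95245fe6d144d16c2fdd60c4e0c6463ec98',
                  'b85642d06d8e9414e8fcfdcb7cb13532ec5454fb8fe7f280'))

adjacent = ('d2b8b6e907dde95245fe6d144d16c2fdd60c4e0c6463ec98'
            'b85642d06d8e9414e8fcfdcb7cb13532ec5454fb8fe7f280')

assert joined == adjacent
assert len(joined) == 96
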
@@ -7,7 +7,8 @@ class DaemonCLITests(unittest.TestCase):
self.assertEqual('0.3.8', DaemonCLI.guess_type('0.3.8'))
self.assertEqual(0.3, DaemonCLI.guess_type('0.3'))
self.assertEqual(3, DaemonCLI.guess_type('3'))
self.assertEqual('VdNmakxFORPSyfCprAD/eDDPk5TY9QYtSA==', DaemonCLI.guess_type('VdNmakxFORPSyfCprAD/eDDPk5TY9QYtSA=='))
self.assertEqual('VdNmakxFORPSyfCprAD/eDDPk5TY9QYtSA==',
DaemonCLI.guess_type('VdNmakxFORPSyfCprAD/eDDPk5TY9QYtSA=='))
self.assertEqual(0.3, DaemonCLI.guess_type('0.3'))
self.assertEqual(True, DaemonCLI.guess_type('TRUE'))
self.assertEqual(True, DaemonCLI.guess_type('true'))

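The assertions above pin down guess_type's observable behaviour: numeric strings are parsed into ints or floats, 'true'/'TRUE' become booleans, and anything else (a version string, a base64-like token) passes through unchanged. The function below is a hypothetical re-implementation of that behaviour for illustration only, not the actual DaemonCLI code; the handling of 'false' is an assumption:

def guess_type_sketch(value):
    if value.lower() == 'true':
        return True
    if value.lower() == 'false':   # assumed by symmetry; not asserted above
        return False
    try:
        return int(value)
    except ValueError:
        pass
    try:
        return float(value)
    except ValueError:
        return value


assert guess_type_sketch('3') == 3
assert guess_type_sketch('0.3') == 0.3
assert guess_type_sketch('0.3.8') == '0.3.8'
assert guess_type_sketch('TRUE') is True
token = 'VdNmakxFORPSyfCprAD/eDDPk5TY9QYtSA=='
assert guess_type_sketch(token) == token
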
@@ -1,24 +1,18 @@
import types
import mock
import json
from twisted.trial import unittest
from twisted.internet import defer, task

from lbryschema.claim import ClaimDict

from lbrynet.core import Session, PaymentRateManager, Wallet
from lbrynet.core.Error import DownloadTimeoutError
from lbrynet.daemon import Downloader
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier,StreamMetadata
from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier

from lbrynet.file_manager.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader, ManagedEncryptedFileDownloaderFactory
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager

from lbrynet.tests.mocks import BlobAvailabilityTracker as DummyBlobAvailabilityTracker
from lbrynet.tests.mocks import ExchangeRateManager as DummyExchangeRateManager
from lbrynet.tests.mocks import BTCLBCFeed, USDBTCFeed
from lbrynet.tests.mocks import mock_conf_settings

class MocDownloader(object):

@@ -33,7 +27,8 @@ class MocDownloader(object):

@defer.inlineCallbacks
def status(self):
out = yield EncryptedFileStatusReport(self.name, self.num_completed, self.num_known, self.running_status)
out = yield EncryptedFileStatusReport(self.name, self.num_completed, self.num_known,
self.running_status)
defer.returnValue(out)

def start(self):

@@ -43,8 +38,9 @@ class MocDownloader(object):
self.stop_called = True
self.finish_deferred.callback(True)

def moc_initialize(self,stream_info):
self.sd_hash ="d5169241150022f996fa7cd6a9a1c421937276a3275eb912790bd07ba7aec1fac5fd45431d226b8fb402691e79aeb24b"
def moc_initialize(self, stream_info):
self.sd_hash = ''.join(('d5169241150022f996fa7cd6a9a1c421937276a3275eb912',
'790bd07ba7aec1fac5fd45431d226b8fb402691e79aeb24b'))
return None

def moc_download_sd_blob(self):

@@ -69,7 +65,7 @@ class GetStreamTests(unittest.TestCase):
prm = mock.Mock(spec=PaymentRateManager.NegotiatedPaymentRateManager)
session.payment_rate_manager = prm
market_feeds = []
rates={}
rates = {}
exchange_rate_manager = DummyExchangeRateManager(market_feeds, rates)
exchange_rate_manager = mock.Mock(spec=ExchangeRateManager)
max_key_fee = {'currency':"LBC", 'amount':10, 'address':''}

@@ -96,7 +92,7 @@ class GetStreamTests(unittest.TestCase):
stream_info = None

with self.assertRaises(AttributeError):
yield getstream.start(stream_info,name)
yield getstream.start(stream_info, name)


@defer.inlineCallbacks

@@ -108,15 +104,15 @@ class GetStreamTests(unittest.TestCase):
def download_sd_blob(self):
raise DownloadTimeoutError(self.file_name)

getstream = self.init_getstream_with_mocs()
getstream = self.init_getstream_with_mocs()
getstream._initialize = types.MethodType(moc_initialize, getstream)
getstream._download_sd_blob = types.MethodType(download_sd_blob, getstream)
getstream._download = types.MethodType(moc_download, getstream)
getstream.pay_key_fee = types.MethodType(moc_pay_key_fee, getstream)
name='test'
name = 'test'
stream_info = None
with self.assertRaises(DownloadTimeoutError):
yield getstream.start(stream_info,name)
yield getstream.start(stream_info, name)
self.assertFalse(getstream.pay_key_fee_called)

@@ -126,14 +122,14 @@ class GetStreamTests(unittest.TestCase):
test that timeout (set to 2 here) exception is raised
when download times out while downloading first blob, and key fee is paid
"""
getstream = self.init_getstream_with_mocs()
getstream = self.init_getstream_with_mocs()
getstream._initialize = types.MethodType(moc_initialize, getstream)
getstream._download_sd_blob = types.MethodType(moc_download_sd_blob, getstream)
getstream._download = types.MethodType(moc_download, getstream)
getstream.pay_key_fee = types.MethodType(moc_pay_key_fee, getstream)
name='test'
name = 'test'
stream_info = None
start = getstream.start(stream_info,name)
start = getstream.start(stream_info, name)
self.clock.advance(1)
self.clock.advance(1)
with self.assertRaises(DownloadTimeoutError):

@@ -147,15 +143,15 @@ class GetStreamTests(unittest.TestCase):
test that if we have 1 completed blob, start() returns
and key fee is paid
"""
getstream = self.init_getstream_with_mocs()
getstream = self.init_getstream_with_mocs()
getstream._initialize = types.MethodType(moc_initialize, getstream)

getstream._download_sd_blob = types.MethodType(moc_download_sd_blob, getstream)
getstream._download = types.MethodType(moc_download, getstream)
getstream.pay_key_fee = types.MethodType(moc_pay_key_fee, getstream)
name='test'
name = 'test'
stream_info = None
start = getstream.start(stream_info,name)
start = getstream.start(stream_info, name)

getstream.downloader.num_completed = 1
self.clock.advance(1)

@@ -170,13 +166,13 @@ class GetStreamTests(unittest.TestCase):
test that if we have a stopped downloader, before a blob is downloaded,
start() returns
"""
getstream = self.init_getstream_with_mocs()
getstream = self.init_getstream_with_mocs()
getstream._initialize = types.MethodType(moc_initialize, getstream)
getstream._download_sd_blob = types.MethodType(moc_download_sd_blob, getstream)
getstream._download = types.MethodType(moc_download, getstream)
name='test'
name = 'test'
stream_info = None
start = getstream.start(stream_info,name)
start = getstream.start(stream_info, name)

getstream.downloader.running_status = ManagedEncryptedFileDownloader.STATUS_STOPPED
self.clock.advance(1)

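Two testing tricks recur in GetStreamTests: individual methods are swapped on a single instance with types.MethodType, and timeouts are driven by twisted.internet.task.Clock (self.clock.advance(1)) instead of the real reactor. A minimal standalone sketch of both, with made-up names:

import types
from twisted.internet import task


class Fetcher(object):
    def fetch(self):
        return "real download"


def fake_fetch(self):
    # Replacement bound to one instance only; `self` is the patched object.
    return "mocked download"


fetcher = Fetcher()
fetcher.fetch = types.MethodType(fake_fetch, fetcher)
assert fetcher.fetch() == "mocked download"

# task.Clock stands in for the reactor: callLater schedules, advance() fires.
clock = task.Clock()
fired = []
clock.callLater(2, fired.append, "timed out")
clock.advance(1)
assert fired == []
clock.advance(1)
assert fired == ["timed out"]
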
@@ -58,7 +58,7 @@ class FeeTest(unittest.TestCase):
}

market_feeds = [BTCLBCFeed(), USDBTCFeed()]
manager = DummyExchangeRateManager(market_feeds,rates)
manager = DummyExchangeRateManager(market_feeds, rates)
result = manager.convert_currency(fee.currency, "LBC", fee.amount)
self.assertEqual(60.0, result)

@@ -74,7 +74,7 @@ class FeeTest(unittest.TestCase):
'BTCLBC': {'spot': 1.0, 'ts': util.DEFAULT_ISO_TIME + 1},
}
market_feeds = [BTCLBCFeed()]
manager = DummyExchangeRateManager(market_feeds,rates)
manager = DummyExchangeRateManager(market_feeds, rates)
with self.assertRaises(Exception):
manager.convert_currency(fee.currency, "LBC", fee.amount)

@@ -84,16 +84,18 @@ class LBRYioFeedTest(unittest.TestCase):
def test_handle_response(self):
feed = ExchangeRateManager.LBRYioFeed()

response ='{\"data\": {\"fresh\": 0, \"lbc_usd\": 0.05863062523378918, \"lbc_btc\": 5.065289549855739e-05, \"btc_usd\": 1157.498}, \"success\": true, \"error\": null}'
response = ''.join(('{\"data\": {\"fresh\": 0, \"lbc_usd\": 0.05863062523378918, ',
'\"lbc_btc\": 5.065289549855739e-05, \"btc_usd\": 1157.498}, ',
'\"success\": true, \"error\": null}'))
out = yield feed._handle_response(response)
expected = 1.0 / 5.065289549855739e-05
self.assertEqual(expected, out)

response='{}'
response = '{}'
with self.assertRaises(InvalidExchangeRateResponse):
out = yield feed._handle_response(response)

response='{"success":true,"result":[]}'
response = '{"success":true,"result":[]}'
with self.assertRaises(InvalidExchangeRateResponse):
out = yield feed._handle_response(response)

@@ -103,15 +105,17 @@ class LBRYioBTCFeedTest(unittest.TestCase):
def test_handle_response(self):
feed = ExchangeRateManager.LBRYioBTCFeed()

response ='{\"data\": {\"fresh\": 0, \"lbc_usd\": 0.05863062523378918, \"lbc_btc\": 5.065289549855739e-05, \"btc_usd\": 1157.498}, \"success\": true, \"error\": null}'
response = ''.join(('{\"data\": {\"fresh\": 0, \"lbc_usd\": 0.05863062523378918, ',
'\"lbc_btc\": 5.065289549855739e-05, \"btc_usd\": 1157.498}, ',
'\"success\": true, \"error\": null}'))
out = yield feed._handle_response(response)
expected = 1.0 / 1157.498
self.assertEqual(expected, out)

response='{}'
response = '{}'
with self.assertRaises(InvalidExchangeRateResponse):
out = yield feed._handle_response(response)

response='{"success":true,"result":[]}'
response = '{"success":true,"result":[]}'
with self.assertRaises(InvalidExchangeRateResponse):
out = yield feed._handle_response(response)

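The expected values in the two feed tests are simply reciprocals of the rates quoted in the canned JSON payload: LBRYioFeed is expected to return 1 / lbc_btc and LBRYioBTCFeed 1 / btc_usd. The arithmetic, reproduced standalone with the same payload:

import json

payload = json.loads('{"data": {"fresh": 0, "lbc_usd": 0.05863062523378918, '
                     '"lbc_btc": 5.065289549855739e-05, "btc_usd": 1157.498}, '
                     '"success": true, "error": null}')

lbc_per_btc = 1.0 / payload["data"]["lbc_btc"]   # expected value in LBRYioFeedTest
btc_per_usd = 1.0 / payload["data"]["btc_usd"]   # expected value in LBRYioBTCFeedTest

assert round(lbc_per_btc) == 19742
assert abs(btc_per_usd - 0.000864) < 1e-5
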
@@ -64,8 +64,8 @@ class SettingsTest(unittest.TestCase):
with self.assertRaises(InvalidCurrencyError):
settings.set('max_key_fee', {'currency':'USD', 'amount':1})

valid_setting= {'currency':'BTC', 'amount':1}
settings.set('max_key_fee', valid_setting )
valid_setting = {'currency':'BTC', 'amount':1}
settings.set('max_key_fee', valid_setting)
out = settings.get('max_key_fee')
self.assertEqual(out, valid_setting)

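The settings test above rejects a max_key_fee in a currency the daemon cannot convert (USD in this test) with InvalidCurrencyError, and accepts BTC. The snippet below is a hypothetical validator with the same observable behaviour, written only to illustrate the check; it is not the actual lbrynet.conf implementation, and the accepted-currency set is an assumption:

class InvalidCurrencyError(Exception):
    pass


ACCEPTED_CURRENCIES = {'BTC', 'LBC'}   # assumed set, inferred from the test above


def validate_max_key_fee(setting):
    if setting['currency'] not in ACCEPTED_CURRENCIES:
        raise InvalidCurrencyError(setting['currency'])
    return setting


assert validate_max_key_fee({'currency': 'BTC', 'amount': 1})
try:
    validate_max_key_fee({'currency': 'USD', 'amount': 1})
except InvalidCurrencyError:
    pass
else:
    raise AssertionError('USD should have been rejected')
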
@@ -1,7 +1,7 @@
import datetime
import time
import binascii
import os
import os
import tempfile
import shutil
import mock