forked from LBRYCommunity/lbry-sdk

test_misc and test_streamify functionals on py3

parent 8c3552dd34
commit 451823f33e

12 changed files with 49 additions and 48 deletions

@@ -1,4 +1,5 @@
 import binascii
+import string
 from collections import defaultdict
 import json
 import logging

@@ -81,7 +82,7 @@ class StreamDescriptorWriter:

     def create_descriptor(self, sd_info):
         return self._write_stream_descriptor(
-            json.dumps(sd_info, sort_keys=True, cls=JSONBytesEncoder).encode()
+            json.dumps(sd_info, sort_keys=True).encode()
         )

     def _write_stream_descriptor(self, raw_data):

@@ -263,7 +264,7 @@ def save_sd_info(blob_manager, sd_hash, sd_info):
                                              (sd_hash, calculated_sd_hash))
     stream_hash = yield blob_manager.storage.get_stream_hash_for_sd_hash(sd_hash)
     if not stream_hash:
-        log.debug("Saving info for %s", sd_info['stream_name'].decode('hex'))
+        log.debug("Saving info for %s", binascii.unhexlify(sd_info['stream_name']))
         stream_name = sd_info['stream_name']
         key = sd_info['key']
         stream_hash = sd_info['stream_hash']

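Note: the str.decode('hex') codec the removed line relied on does not exist in py3; binascii.unhexlify is the portable replacement. A minimal standalone sketch, not from the codebase (the sample value is invented):

import binascii

hex_name = "6c6272792e747874"        # hex-encoded stream name, invented sample
raw = binascii.unhexlify(hex_name)   # b'lbry.txt' -- always bytes in py3
print(raw.decode())                  # 'lbry.txt' -- decode only when text is needed
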
@@ -355,16 +356,16 @@ def get_blob_hashsum(b):
     if length != 0:
         blob_hashsum.update(blob_hash.encode())
     blob_hashsum.update(str(blob_num).encode())
-    blob_hashsum.update(iv)
+    blob_hashsum.update(iv.encode())
     blob_hashsum.update(str(length).encode())
     return blob_hashsum.digest()


 def get_stream_hash(hex_stream_name, key, hex_suggested_file_name, blob_infos):
     h = get_lbry_hash_obj()
-    h.update(hex_stream_name)
-    h.update(key)
-    h.update(hex_suggested_file_name)
+    h.update(hex_stream_name.encode())
+    h.update(key.encode())
+    h.update(hex_suggested_file_name.encode())
     blobs_hashsum = get_lbry_hash_obj()
     for blob in blob_infos:
         blobs_hashsum.update(get_blob_hashsum(blob))

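The pattern in this hunk recurs throughout the commit: py3 hash objects raise TypeError on str input, so every text field must be encoded before update(). A minimal standalone sketch, with hashlib.sha384 standing in for get_lbry_hash_obj():

import hashlib

h = hashlib.sha384()              # stand-in for get_lbry_hash_obj()
h.update("stream name".encode())  # str -> bytes, as the hunk above does
h.update(b"already bytes")        # bytes pass through unchanged
print(h.hexdigest())
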
@@ -373,8 +374,7 @@ def get_stream_hash(hex_stream_name, key, hex_suggested_file_name, blob_infos):


 def verify_hex(text, field_name):
-    for c in text:
-        if c not in b'0123456789abcdef':
+    if not set(text).issubset(set(string.hexdigits)):
         raise InvalidStreamDescriptorError("%s is not a hex-encoded string" % field_name)


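Worth noting: the new check is slightly wider than the old one, because string.hexdigits is '0123456789abcdefABCDEF', so uppercase digits now pass where the old lowercase-only loop rejected them. A standalone sketch of the new style:

import string

def is_hex(text):
    # set-subset test over the whole string replaces the per-character loop
    return set(text).issubset(set(string.hexdigits))

print(is_hex("deadBEEF"))  # True -- uppercase now passes too
print(is_hex("not-hex"))   # False
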
@@ -400,7 +400,7 @@ def validate_descriptor(stream_info):

     calculated_stream_hash = get_stream_hash(
         hex_stream_name, key, hex_suggested_file_name, blobs
-    ).encode()
+    )
     if calculated_stream_hash != stream_hash:
         raise InvalidStreamDescriptorError("Stream hash does not match stream metadata")
     return True

@@ -32,7 +32,7 @@ class ClientProtocol(Protocol, TimeoutMixin):
         self._rate_limiter = self.factory.rate_limiter
         self.peer = self.factory.peer
         self._response_deferreds = {}
-        self._response_buff = ''
+        self._response_buff = b''
         self._downloading_blob = False
         self._blob_download_request = None
         self._next_request = {}

@@ -59,7 +59,7 @@ class ClientProtocol(Protocol, TimeoutMixin):
             self.transport.loseConnection()
         response, extra_data = self._get_valid_response(self._response_buff)
         if response is not None:
-            self._response_buff = ''
+            self._response_buff = b''
             self._handle_response(response)
         if self._downloading_blob is True and len(extra_data) != 0:
             self._blob_download_request.write(extra_data)

@@ -69,17 +69,17 @@ class ClientProtocol(Protocol, TimeoutMixin):
         self.peer.report_down()
         self.transport.abortConnection()

-    def connectionLost(self, reason):
+    def connectionLost(self, reason=None):
         log.debug("Connection lost to %s: %s", self.peer, reason)
         self.setTimeout(None)
         self.connection_closed = True
-        if reason.check(error.ConnectionDone):
+        if reason.check(error.ConnectionDone) or reason is None:
             err = failure.Failure(ConnectionClosedBeforeResponseError())
         else:
             err = reason
         for key, d in self._response_deferreds.items():
-            del self._response_deferreds[key]
             d.errback(err)
+        self._response_deferreds.clear()
         if self._blob_download_request is not None:
             self._blob_download_request.cancel(err)
         self.factory.connection_was_made_deferred.callback(True)

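For context on moving the delete out of the loop: py3 dict views forbid resizing the dict during iteration, which the old per-key del did. A standalone sketch:

d = {"a": 1, "b": 2}
try:
    for key, value in d.items():
        del d[key]        # resizes the dict mid-iteration
except RuntimeError as err:
    print(err)            # dictionary changed size during iteration

d = {"a": 1, "b": 2}
for key, value in d.items():
    pass                  # a read-only pass is safe
d.clear()                 # bulk removal afterwards, as the new code does
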
@@ -124,7 +124,7 @@ class ClientProtocol(Protocol, TimeoutMixin):

     def _handle_request_error(self, err):
         log.error("An unexpected error occurred creating or sending a request to %s. %s: %s",
-                  self.peer, err.type, err.message)
+                  self.peer, err.type, err)
         self.transport.loseConnection()

     def _ask_for_request(self):

@@ -149,7 +149,7 @@ class ClientProtocol(Protocol, TimeoutMixin):
         self.setTimeout(self.PROTOCOL_TIMEOUT)
         # TODO: compare this message to the last one. If they're the same,
         # TODO: incrementally delay this message.
-        m = json.dumps(request_msg, default=encode_decimal)
+        m = json.dumps(request_msg, default=encode_decimal).encode()
         self.transport.write(m)

     def _get_valid_response(self, response_msg):

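json.dumps always returns str in py3, while Twisted's transport.write() requires bytes, hence the .encode() above. A standalone sketch (the message content is invented):

import json

request = {"query": ["ping"]}           # invented sample request
wire = json.dumps(request).encode()     # str -> UTF-8 bytes for the transport
print(type(wire).__name__, wire)        # bytes b'{"query": ["ping"]}'
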
@@ -157,7 +157,7 @@ class ClientProtocol(Protocol, TimeoutMixin):
         response = None
         curr_pos = 0
         while 1:
-            next_close_paren = response_msg.find('}', curr_pos)
+            next_close_paren = response_msg.find(b'}', curr_pos)
             if next_close_paren != -1:
                 curr_pos = next_close_paren + 1
             try:

@@ -16,8 +16,8 @@ class ServerRequestHandler:
     def __init__(self, consumer):
         self.consumer = consumer
         self.production_paused = False
-        self.request_buff = ''
-        self.response_buff = ''
+        self.request_buff = b''
+        self.response_buff = b''
         self.producer = None
         self.request_received = False
         self.CHUNK_SIZE = 2**14

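These buffers must start as b'' because py3 never mixes str and bytes in concatenation, so an empty-str buffer would break on the first chunk of network data. A standalone sketch:

buff = b''
buff += b'{"part": 1}'     # bytes += bytes is fine
try:
    buff += '{"part": 2}'  # py2 would silently coerce; py3 raises
except TypeError as err:
    print(err)             # can't concat str to bytes
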
@@ -54,7 +54,7 @@ class ServerRequestHandler:
             return
         chunk = self.response_buff[:self.CHUNK_SIZE]
         self.response_buff = self.response_buff[self.CHUNK_SIZE:]
-        if chunk == '':
+        if chunk == b'':
             return
         log.trace("writing %s bytes to the client", len(chunk))
         self.consumer.write(chunk)

|
@ -99,7 +99,7 @@ class ServerRequestHandler:
|
|||
self.request_buff = self.request_buff + data
|
||||
msg = self.try_to_parse_request(self.request_buff)
|
||||
if msg:
|
||||
self.request_buff = ''
|
||||
self.request_buff = b''
|
||||
self._process_msg(msg)
|
||||
else:
|
||||
log.debug("Request buff not a valid json message")
|
||||
|
@ -132,7 +132,7 @@ class ServerRequestHandler:
|
|||
self._produce_more()
|
||||
|
||||
def send_response(self, msg):
|
||||
m = json.dumps(msg)
|
||||
m = json.dumps(msg).encode()
|
||||
log.debug("Sending a response of length %s", str(len(m)))
|
||||
log.debug("Response: %s", str(m))
|
||||
self.response_buff = self.response_buff + m
|
||||
|
|
|
@@ -23,7 +23,7 @@ class CryptBlobInfo(BlobInfo):
         info = {
             "blob_num": self.blob_num,
             "length": self.length,
-            "iv": self.iv
+            "iv": self.iv.decode()
         }
         if self.blob_hash:
             info['blob_hash'] = self.blob_hash

@@ -61,7 +61,7 @@ class CryptStreamDownloader:
         self.payment_rate_manager = payment_rate_manager
         self.wallet = wallet
         self.key = binascii.unhexlify(key)
-        self.stream_name = binascii.unhexlify(stream_name)
+        self.stream_name = binascii.unhexlify(stream_name).decode()
         self.completed = False
         self.stopped = True
         self.stopping = False

@@ -37,14 +37,14 @@ class EncryptedFileStreamCreator(CryptStreamCreator):
     def _finished(self):
         # calculate the stream hash
         self.stream_hash = get_stream_hash(
-            hexlify(self.name.encode()), hexlify(self.key), hexlify(self.name.encode()),
+            hexlify(self.name.encode()).decode(), hexlify(self.key).decode(), hexlify(self.name.encode()).decode(),
             self.blob_infos
         )

         # generate the sd info
         self.sd_info = format_sd_info(
-            EncryptedFileStreamType, hexlify(self.name.encode()), hexlify(self.key),
-            hexlify(self.name.encode()), self.stream_hash.encode(), self.blob_infos
+            EncryptedFileStreamType, hexlify(self.name.encode()).decode(), hexlify(self.key).decode(),
+            hexlify(self.name.encode()).decode(), self.stream_hash, self.blob_infos
         )

         # sanity check

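hexlify returns bytes in py3, so this hunk adds .decode() wherever the descriptor needs hex strings rather than hex bytes. A standalone sketch (sample value invented):

from binascii import hexlify

name = "test_file"
hex_name = hexlify(name.encode())   # b'746573745f66696c65' -- bytes
print(hex_name.decode())            # '746573745f66696c65' -- plain str for the sd info
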
@@ -3,6 +3,7 @@ Keep track of which LBRY Files are downloading and store their LBRY File specifi
 """
 import os
 import logging
+from binascii import hexlify, unhexlify

 from twisted.internet import defer, task, reactor
 from twisted.python.failure import Failure

|
@ -185,7 +186,7 @@ class EncryptedFileManager:
|
|||
# when we save the file we'll atomic touch the nearest file to the suggested file name
|
||||
# that doesn't yet exist in the download directory
|
||||
rowid = yield self.storage.save_downloaded_file(
|
||||
stream_hash, os.path.basename(file_name.decode('hex')).encode('hex'), download_directory, blob_data_rate
|
||||
stream_hash, hexlify(os.path.basename(unhexlify(file_name))), download_directory, blob_data_rate
|
||||
)
|
||||
file_name = yield self.storage.get_filename_for_rowid(rowid)
|
||||
lbry_file = self._get_lbry_file(
|
||||
|
|
|
@@ -92,7 +92,7 @@ class LbryUploader(object):
                                          query_handler_factories,
                                          self.peer_manager)
         self.server_port = reactor.listenTCP(5553, server_factory, interface="localhost")
-        test_file = GenFile(self.file_size, b''.join([chr(i) for i in xrange(0, 64, 6)]))
+        test_file = GenFile(self.file_size, bytes([i for i in range(0, 64, 6)]))
         lbry_file = yield create_lbry_file(self.blob_manager, self.storage, self.prm, self.lbry_file_manager,
                                            "test_file", test_file)
         defer.returnValue(lbry_file.sd_hash)

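py2's b''.join(chr(i) ...) has no direct py3 equivalent because chr() now returns str; bytes(iterable_of_ints) is the idiom the new line uses. A standalone sketch:

pattern = bytes([i for i in range(0, 64, 6)])
print(list(pattern))   # [0, 6, 12, 18, 24, 30, 36, 42, 48, 54, 60]
print(pattern)         # b'\x00\x06\x0c\x12\x18\x1e$*06<'
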
@@ -155,10 +155,10 @@ class TestTransfer(unittest.TestCase):
         )
         metadata = yield self.sd_identifier.get_metadata_for_sd_blob(sd_blob)
         downloader = yield metadata.factories[0].make_downloader(
-            metadata, self.prm.min_blob_data_payment_rate, self.prm, self.db_dir, download_mirrors=None
+            metadata, self.prm.min_blob_data_payment_rate, self.prm, self.db_dir.encode(), download_mirrors=None
         )
         yield downloader.start()
-        with open(os.path.join(self.db_dir, 'test_file')) as f:
+        with open(os.path.join(self.db_dir, 'test_file'), 'rb') as f:
             hashsum = md5()
             hashsum.update(f.read())
         self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be")

@@ -78,13 +78,13 @@ class TestStreamify(TestCase):
             iv = 0
             while 1:
                 iv += 1
-                yield "%016d" % iv
+                yield b"%016d" % iv

         def create_stream():
-            test_file = GenFile(5209343, b''.join([chr(i + 3) for i in range(0, 64, 6)]))
+            test_file = GenFile(5209343, bytes([(i + 3) for i in range(0, 64, 6)]))
             d = create_lbry_file(
                 self.blob_manager, self.storage, self.prm, self.lbry_file_manager, "test_file", test_file,
-                key="0123456701234567", iv_generator=iv_generator()
+                key=b'0123456701234567', iv_generator=iv_generator()
             )
             d.addCallback(lambda lbry_file: lbry_file.stream_hash)
             return d

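The b"%016d" form relies on bytes %-formatting (PEP 461, available since py3.5), which lets the iv generator emit 16-byte ASCII counters directly, matching the old py2 output. A standalone sketch of the generator:

def iv_generator():
    iv = 0
    while True:
        iv += 1
        yield b"%016d" % iv   # PEP 461 bytes formatting, py3.5+

gen = iv_generator()
print(next(gen))  # b'0000000000000001'
print(next(gen))  # b'0000000000000002'
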
@@ -95,13 +95,13 @@ class TestStreamify(TestCase):

     @defer.inlineCallbacks
     def test_create_and_combine_stream(self):
-        test_file = GenFile(53209343, b''.join([chr(i + 5) for i in range(0, 64, 6)]))
+        test_file = GenFile(53209343, bytes([(i + 5) for i in range(0, 64, 6)]))
         lbry_file = yield create_lbry_file(self.blob_manager, self.storage, self.prm, self.lbry_file_manager,
                                            "test_file", test_file)
         sd_hash = yield self.storage.get_sd_blob_hash_for_stream(lbry_file.stream_hash)
         self.assertTrue(lbry_file.sd_hash, sd_hash)
         yield lbry_file.start()
-        f = open('test_file')
+        f = open('test_file', 'rb')
         hashsum = md5()
         hashsum.update(f.read())
         self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b")

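Both test hunks switch the files to 'rb' because md5().update() rejects str in py3; text mode would decode the contents before hashing. A standalone sketch with a throwaway file:

import hashlib, os, tempfile

path = os.path.join(tempfile.gettempdir(), "test_file")  # throwaway sample file
with open(path, "wb") as f:
    f.write(b"sample contents")
with open(path, "rb") as f:          # 'rb', as the hunks above add
    print(hashlib.md5(f.read()).hexdigest())
os.remove(path)
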
@@ -65,13 +65,13 @@ class BasicTransactionTest(IntegrationTestCase):
         await self.broadcast(cert_tx)
         await self.broadcast(claim_tx)
         await asyncio.wait([  # mempool
-            self.on_transaction(claim_tx),
-            self.on_transaction(cert_tx),
+            self.on_transaction_id(claim_tx.id),
+            self.on_transaction_id(cert_tx.id),
         ])
         await self.blockchain.generate(1)
         await asyncio.wait([  # confirmed
-            self.on_transaction(claim_tx),
-            self.on_transaction(cert_tx),
+            self.on_transaction_id(claim_tx.id),
+            self.on_transaction_id(cert_tx.id),
         ])

         self.assertEqual(round(await d2f(self.account.get_balance(0))/COIN, 1), 8.0)

@@ -94,7 +94,7 @@ class PointTraderKeyExchanger(object):

     def send_next_request(self, peer, protocol):
         if not protocol in self._protocols:
-            r = ClientRequest({'public_key': self.wallet.encoded_public_key},
+            r = ClientRequest({'public_key': self.wallet.encoded_public_key.decode()},
                               'public_key')
             d = protocol.add_request(r)
             d.addCallback(self._handle_exchange_response, peer, r, protocol)

@@ -156,7 +156,7 @@ class PointTraderKeyQueryHandler(object):
                 return defer.fail(Failure(value_error))
             self.public_key = new_encoded_pub_key
             self.wallet.set_public_key_for_peer(self.peer, self.public_key)
-            fields = {'public_key': self.wallet.encoded_public_key}
+            fields = {'public_key': self.wallet.encoded_public_key.decode()}
             return defer.succeed(fields)
         if self.public_key is None:
             return defer.fail(Failure(ValueError("Expected but did not receive a public key")))

@@ -268,7 +268,7 @@ class GenFile(io.RawIOBase):
     def __init__(self, size, pattern):
         io.RawIOBase.__init__(self)
         self.size = size
-        self.pattern = pattern.encode()
+        self.pattern = pattern.encode() if not isinstance(pattern, bytes) else pattern
         self.read_so_far = 0
         self.buff = b''
         self.last_offset = 0

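The isinstance guard lets GenFile keep accepting py2-style str patterns while the ported tests pass bytes directly. A standalone sketch of the same normalization:

def normalize_pattern(pattern):
    # accept str or bytes, always store bytes
    return pattern.encode() if not isinstance(pattern, bytes) else pattern

print(normalize_pattern("abc"))   # b'abc'
print(normalize_pattern(b"abc"))  # b'abc'
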
@@ -73,8 +73,8 @@ class CreateEncryptedFileTest(unittest.TestCase):

     @defer.inlineCallbacks
     def test_can_create_file(self):
-        expected_stream_hash = b"41e6b247d923d191b154fb6f1b8529d6ddd6a73d65c35" \
-                               b"7b1acb742dd83151fb66393a7709e9f346260a4f4db6de10c25"
+        expected_stream_hash = "41e6b247d923d191b154fb6f1b8529d6ddd6a73d65c35" \
+                               "7b1acb742dd83151fb66393a7709e9f346260a4f4db6de10c25"
         expected_sd_hash = "40c485432daec586c1a2d247e6c08d137640a5af6e81f3f652" \
                            "3e62e81a2e8945b0db7c94f1852e70e371d917b994352c"
         filename = 'test.file'

@@ -106,8 +106,8 @@ class CreateEncryptedFileTest(unittest.TestCase):

     @defer.inlineCallbacks
     def test_can_create_file_with_unicode_filename(self):
-        expected_stream_hash = (b'd1da4258f3ce12edb91d7e8e160d091d3ab1432c2e55a6352dce0'
-                                b'2fd5adb86fe144e93e110075b5865fff8617776c6c0')
+        expected_stream_hash = ('d1da4258f3ce12edb91d7e8e160d091d3ab1432c2e55a6352dce0'
+                                '2fd5adb86fe144e93e110075b5865fff8617776c6c0')
         filename = u'☃.file'
         lbry_file = yield self.create_file(filename)
         self.assertEqual(expected_stream_hash, lbry_file.stream_hash)