py3 compatibility updates

This commit is contained in:
Lex Berezhny 2018-07-13 00:21:45 -04:00 committed by Jack Robison
parent 43bef9447c
commit 076af7ef43
No known key found for this signature in database
GPG key ID: DF25C68FE0239BB2
13 changed files with 41 additions and 40 deletions

View file

@ -46,6 +46,8 @@ class BlobFileCreator(object):
def write(self, data):
if not self._is_open:
raise IOError
if not isinstance(data, bytes):
data = data.encode()
self._hashsum.update(data)
self.len_so_far += len(data)
self.buffer.write(data)

View file

@ -283,9 +283,9 @@ def format_blobs(crypt_blob_infos):
for blob_info in crypt_blob_infos:
blob = {}
if blob_info.length != 0:
blob['blob_hash'] = str(blob_info.blob_hash)
blob['blob_hash'] = blob_info.blob_hash
blob['blob_num'] = blob_info.blob_num
blob['iv'] = str(blob_info.iv)
blob['iv'] = blob_info.iv
blob['length'] = blob_info.length
formatted_blobs.append(blob)
return formatted_blobs
@ -355,7 +355,7 @@ def get_blob_hashsum(b):
iv = b['iv']
blob_hashsum = get_lbry_hash_obj()
if length != 0:
blob_hashsum.update(blob_hash)
blob_hashsum.update(blob_hash.encode())
blob_hashsum.update(str(blob_num).encode())
blob_hashsum.update(iv)
blob_hashsum.update(str(length).encode())

View file

@ -148,7 +148,7 @@ class CryptStreamBlobMaker(object):
def close(self):
log.debug("closing blob %s with plaintext len %s", str(self.blob_num), str(self.length))
if self.length != 0:
self.length += (AES.block_size / 8) - (self.length % (AES.block_size / 8))
self.length += (AES.block_size // 8) - (self.length % (AES.block_size // 8))
padded_data = self.padder.finalize()
encrypted_data = self.cipher.update(padded_data) + self.cipher.finalize()
self.blob.write(encrypted_data)

View file

@ -121,7 +121,7 @@ class CryptStreamCreator(object):
yield defer.DeferredList(self.finished_deferreds)
self.blob_count += 1
iv = next(self.iv_generator)
iv = next(self.iv_generator).encode()
final_blob = self._get_blob_maker(iv, self.blob_manager.get_blob_creator())
stream_terminator = yield final_blob.close()
terminator_info = yield self._blob_finished(stream_terminator)
@ -132,7 +132,7 @@ class CryptStreamCreator(object):
if self.current_blob is None:
self.next_blob_creator = self.blob_manager.get_blob_creator()
self.blob_count += 1
iv = next(self.iv_generator)
iv = next(self.iv_generator).encode()
self.current_blob = self._get_blob_maker(iv, self.next_blob_creator)
done, num_bytes_written = self.current_blob.write(data)
data = data[num_bytes_written:]

View file

@ -388,7 +388,7 @@ class Daemon(AuthJSONRPCServer):
"nout": nout,
"tx": hexlify(tx.raw),
"fee": str(Decimal(tx.fee) / COIN),
"claim_id": tx.get_claim_id(0),
"claim_id": hexlify(tx.get_claim_id(0)),
"value": hexlify(script.values['claim']),
"claim_address": self.ledger.hash160_to_address(script.values['pubkey_hash'])
})

View file

@ -687,8 +687,11 @@ class SQLiteStorage(WalletDatabase):
).fetchone()
if not known_sd_hash:
raise Exception("stream not found")
known_sd_hash = known_sd_hash[0]
if not isinstance(known_sd_hash, bytes):
known_sd_hash = known_sd_hash.encode()
# check the claim contains the same sd hash
if known_sd_hash[0] != claim.source_hash:
if known_sd_hash != claim.source_hash:
raise Exception("stream mismatch")
# if there is a current claim associated to the file, check that the new claim is an update to it
@ -872,7 +875,7 @@ def _format_claim_response(outpoint, claim_id, name, amount, height, serialized,
"claim_id": claim_id,
"address": address,
"claim_sequence": claim_sequence,
"value": ClaimDict.deserialize(serialized.decode('hex')).claim_dict,
"value": ClaimDict.deserialize(unhexlify(serialized)).claim_dict,
"height": height,
"amount": float(Decimal(amount) / Decimal(COIN)),
"nout": int(outpoint.split(":")[1]),

View file

@ -268,7 +268,7 @@ class GenFile(io.RawIOBase):
def __init__(self, size, pattern):
io.RawIOBase.__init__(self)
self.size = size
self.pattern = pattern
self.pattern = pattern.encode()
self.read_so_far = 0
self.buff = b''
self.last_offset = 0
@ -301,7 +301,7 @@ class GenFile(io.RawIOBase):
def _generate_chunk(self, size=KB):
output = self.pattern[self.last_offset:self.last_offset + size]
n_left = size - len(output)
whole_patterns = n_left / len(self.pattern)
whole_patterns = n_left // len(self.pattern)
output += self.pattern * whole_patterns
self.last_offset = size - len(output)
output += self.pattern[:self.last_offset]

View file

@ -6,7 +6,7 @@ from lbrynet.core.Peer import Peer
from lbrynet.core.PeerManager import PeerManager
from lbrynet.core.Error import NoResponseError
from twisted.trial import unittest
from twisted.trial.unittest import TestCase
from twisted.internet import defer, reactor, task
from twisted.internet.task import deferLater
from twisted.internet.protocol import ServerFactory
@ -24,7 +24,7 @@ class MocDownloader(object):
pass
class MocRequestCreator(object):
implements(IRequestCreator)
#implements(IRequestCreator)
def __init__(self, peers_to_return, peers_to_return_head_blob=[]):
self.peers_to_return = peers_to_return
self.peers_to_return_head_blob = peers_to_return_head_blob
@ -56,7 +56,7 @@ class MocRequestCreator(object):
return self.peers_to_return_head_blob
class MocFunctionalQueryHandler(object):
implements(IQueryHandler)
#implements(IQueryHandler)
def __init__(self, clock, is_good=True, is_delayed=False):
self.query_identifiers = ['moc_request']
@ -83,7 +83,7 @@ class MocFunctionalQueryHandler(object):
class MocQueryHandlerFactory(object):
implements(IQueryHandlerFactory)
#implements(IQueryHandlerFactory)
# if is_good, the query handler works as expected,
# if is_delayed, the query handler will delay its response
def __init__(self, clock, is_good=True, is_delayed=False):
@ -113,7 +113,8 @@ class MocServerProtocolFactory(ServerFactory):
self.query_handler_factories = {}
self.peer_manager = PeerManager()
class TestIntegrationConnectionManager(unittest.TestCase):
class TestIntegrationConnectionManager(TestCase):
def setUp(self):
conf.initialize_settings(False)

View file

@ -1,4 +1,4 @@
import StringIO
from io import StringIO
import mock
from twisted.internet import defer
@ -119,7 +119,7 @@ class TestBlobRequestHandlerSender(unittest.TestCase):
def test_file_is_sent_to_consumer(self):
# TODO: also check that the expected payment values are set
consumer = proto_helpers.StringTransport()
test_file = StringIO.StringIO('test')
test_file = StringIO('test')
handler = BlobRequestHandler.BlobRequestHandler(None, None, None, None)
handler.peer = mock.create_autospec(Peer.Peer)
handler.currently_uploading = mock.Mock()

View file

@ -123,12 +123,12 @@ class StorageTest(unittest.TestCase):
yield self.store_fake_blob(sd_hash)
for blob in blobs.itervalues():
for blob in blobs.values():
yield self.store_fake_blob(blob)
yield self.store_fake_stream(stream_hash, sd_hash)
for pos, blob in sorted(blobs.iteritems(), key=lambda x: x[0]):
for pos, blob in sorted(blobs.items(), key=lambda x: x[0]):
yield self.store_fake_stream_blob(stream_hash, blob, pos)
@ -163,8 +163,8 @@ class BlobStorageTests(StorageTest):
class SupportsStorageTests(StorageTest):
@defer.inlineCallbacks
def test_supports_storage(self):
claim_ids = [random_lbry_hash() for _ in range(10)]
random_supports = [{"txid": random_lbry_hash(), "nout":i, "address": "addr{}".format(i), "amount": i}
claim_ids = [random_lbry_hash().decode() for _ in range(10)]
random_supports = [{"txid": random_lbry_hash().decode(), "nout":i, "address": "addr{}".format(i), "amount": i}
for i in range(20)]
expected_supports = {}
for idx, claim_id in enumerate(claim_ids):
@ -311,11 +311,8 @@ class ContentClaimStorageTests(StorageTest):
# test that we can't associate a claim update with a new stream to the file
second_stream_hash, second_sd_hash = random_lbry_hash(), random_lbry_hash()
yield self.make_and_store_fake_stream(blob_count=2, stream_hash=second_stream_hash, sd_hash=second_sd_hash)
try:
with self.assertRaisesRegex(Exception, "stream mismatch"):
yield self.storage.save_content_claim(second_stream_hash, fake_outpoint)
raise Exception("test failed")
except Exception as err:
self.assertTrue(err.message == "stream mismatch")
# test that we can associate a new claim update containing the same stream to the file
update_info = deepcopy(fake_claim_info)
@ -333,12 +330,9 @@ class ContentClaimStorageTests(StorageTest):
invalid_update_info['nout'] = 0
invalid_update_info['claim_id'] = "beef0002" * 5
invalid_update_outpoint = "%s:%i" % (invalid_update_info['txid'], invalid_update_info['nout'])
try:
with self.assertRaisesRegex(Exception, "invalid stream update"):
yield self.storage.save_claims([invalid_update_info])
yield self.storage.save_content_claim(stream_hash, invalid_update_outpoint)
raise Exception("test failed")
except Exception as err:
self.assertTrue(err.message == "invalid stream update")
current_claim_info = yield self.storage.get_content_claim(stream_hash)
# this should still be the previous update
self.assertDictEqual(current_claim_info, update_info)

View file

@ -12,6 +12,7 @@ from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager
from lbrynet.database.storage import SQLiteStorage
from lbrynet.file_manager import EncryptedFileCreator
from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager
from lbrynet.core.StreamDescriptor import bytes2unicode
from tests import mocks
from tests.util import mk_db_and_blob_dir, rm_db_and_blob_dir
@ -29,7 +30,7 @@ MB = 2**20
def iv_generator():
while True:
yield '3' * (AES.block_size / 8)
yield '3' * (AES.block_size // 8)
class CreateEncryptedFileTest(unittest.TestCase):
@ -62,7 +63,7 @@ class CreateEncryptedFileTest(unittest.TestCase):
@defer.inlineCallbacks
def create_file(self, filename):
handle = mocks.GenFile(3*MB, '1')
key = '2' * (AES.block_size / 8)
key = b'2' * (AES.block_size // 8)
out = yield EncryptedFileCreator.create_lbry_file(
self.blob_manager, self.storage, self.prm, self.lbry_file_manager, filename, handle, key, iv_generator()
)
@ -70,8 +71,8 @@ class CreateEncryptedFileTest(unittest.TestCase):
@defer.inlineCallbacks
def test_can_create_file(self):
expected_stream_hash = "41e6b247d923d191b154fb6f1b8529d6ddd6a73d65c35" \
"7b1acb742dd83151fb66393a7709e9f346260a4f4db6de10c25"
expected_stream_hash = b"41e6b247d923d191b154fb6f1b8529d6ddd6a73d65c35" \
b"7b1acb742dd83151fb66393a7709e9f346260a4f4db6de10c25"
expected_sd_hash = "40c485432daec586c1a2d247e6c08d137640a5af6e81f3f652" \
"3e62e81a2e8945b0db7c94f1852e70e371d917b994352c"
filename = 'test.file'
@ -85,8 +86,8 @@ class CreateEncryptedFileTest(unittest.TestCase):
# this comes from the database, the blobs returned are sorted
sd_info = yield get_sd_info(self.storage, lbry_file.stream_hash, include_blobs=True)
self.assertDictEqual(sd_info, sd_file_info)
self.assertListEqual(sd_info['blobs'], sd_file_info['blobs'])
self.maxDiff = None
self.assertDictEqual(bytes2unicode(sd_info), sd_file_info)
self.assertEqual(sd_info['stream_hash'], expected_stream_hash)
self.assertEqual(len(sd_info['blobs']), 3)
self.assertNotEqual(sd_info['blobs'][0]['length'], 0)
@ -102,8 +103,8 @@ class CreateEncryptedFileTest(unittest.TestCase):
@defer.inlineCallbacks
def test_can_create_file_with_unicode_filename(self):
expected_stream_hash = ('d1da4258f3ce12edb91d7e8e160d091d3ab1432c2e55a6352dce0'
'2fd5adb86fe144e93e110075b5865fff8617776c6c0')
expected_stream_hash = (b'd1da4258f3ce12edb91d7e8e160d091d3ab1432c2e55a6352dce0'
b'2fd5adb86fe144e93e110075b5865fff8617776c6c0')
filename = u'☃.file'
lbry_file = yield self.create_file(filename)
self.assertEqual(expected_stream_hash, lbry_file.stream_hash)

View file

View file

@ -1,12 +1,12 @@
from binascii import hexlify, unhexlify
from twisted.trial import unittest
from torba.baseaccount import Account
from torba.constants import CENT, COIN
from torba.wallet import Wallet
from torba.basetransaction import NULL_HASH
from lbrynet.wallet.coin import LBC
from lbrynet.wallet.account import Account
from lbrynet.wallet.ledger import MainNetLedger
from lbrynet.wallet.transaction import Transaction, Output, Input
from lbrynet.wallet.manager import LbryWalletManager