progress on publish command: py3 porting and integration tests

parent b1c5fe0b4d
commit 43bef9447c

8 changed files with 101 additions and 24 deletions
@@ -1,3 +1,4 @@
+import six
 import binascii
 from collections import defaultdict
 import json
@@ -66,6 +67,16 @@ class BlobStreamDescriptorReader(StreamDescriptorReader):
         return threads.deferToThread(get_data)


+def bytes2unicode(value):
+    if isinstance(value, bytes):
+        return value.decode()
+    elif isinstance(value, (list, tuple)):
+        return [bytes2unicode(v) for v in value]
+    elif isinstance(value, dict):
+        return {key: bytes2unicode(v) for key, v in value.items()}
+    return value
+
+
 class StreamDescriptorWriter(object):
     """Classes which derive from this class write fields from a dictionary
     of fields to a stream descriptor"""
@@ -73,7 +84,7 @@ class StreamDescriptorWriter(object):
         pass

     def create_descriptor(self, sd_info):
-        return self._write_stream_descriptor(json.dumps(sd_info, sort_keys=True))
+        return self._write_stream_descriptor(json.dumps(bytes2unicode(sd_info), sort_keys=True))

     def _write_stream_descriptor(self, raw_data):
         """This method must be overridden by subclasses to write raw data to
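Note: json.dumps under Python 3 raises TypeError on bytes values, which is what the new bytes2unicode pass ahead of create_descriptor's dumps call works around. A minimal standalone sketch of the behavior, using hypothetical sample data:

    import json

    def bytes2unicode(value):
        # recursively decode bytes inside nested lists/tuples/dicts
        if isinstance(value, bytes):
            return value.decode()
        elif isinstance(value, (list, tuple)):
            return [bytes2unicode(v) for v in value]
        elif isinstance(value, dict):
            return {key: bytes2unicode(v) for key, v in value.items()}
        return value

    sd_info = {'stream_name': b'666f6f', 'key': b'deadbeef'}  # hypothetical data
    # json.dumps(sd_info) would raise:
    #   TypeError: Object of type bytes is not JSON serializable
    print(json.dumps(bytes2unicode(sd_info), sort_keys=True))
    # {"key": "deadbeef", "stream_name": "666f6f"}

Dict keys are left untouched by the helper, so keys are assumed to already be str.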
@@ -345,9 +356,9 @@ def get_blob_hashsum(b):
     blob_hashsum = get_lbry_hash_obj()
     if length != 0:
         blob_hashsum.update(blob_hash)
-    blob_hashsum.update(str(blob_num))
+    blob_hashsum.update(str(blob_num).encode())
     blob_hashsum.update(iv)
-    blob_hashsum.update(str(length))
+    blob_hashsum.update(str(length).encode())
     return blob_hashsum.digest()


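Note: the str(...).encode() calls exist because hashlib digests only accept bytes under Python 3. A quick illustration (sha384 chosen arbitrarily here; whatever get_lbry_hash_obj returns behaves the same way):

    import hashlib

    h = hashlib.sha384()
    # h.update(str(7))  # py3: TypeError, text must be encoded before hashing
    h.update(str(7).encode())   # ok once the text is encoded to bytes
    h.update(b'\x00' * 16)      # raw bytes such as the IV pass straight through
    print(h.hexdigest())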
@@ -365,7 +376,7 @@ def get_stream_hash(hex_stream_name, key, hex_suggested_file_name, blob_infos):

 def verify_hex(text, field_name):
     for c in text:
-        if c not in '0123456789abcdef':
+        if c not in b'0123456789abcdef':
             raise InvalidStreamDescriptorError("%s is not a hex-encoded string" % field_name)


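Note: the literal switches to b'0123456789abcdef' because verify_hex is now fed bytes, and iterating a bytes object in Python 3 yields integers; an int-in-bytes membership test compares byte values, so both sides must be bytes:

    text = b'00ff'
    for c in text:                        # c is an int under py3: 48, 48, 102, 102
        assert c in b'0123456789abcdef'   # int membership checks byte values
    # 'f' in b'0123456789abcdef' would raise TypeError instead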
@@ -391,7 +402,7 @@ def validate_descriptor(stream_info):

     calculated_stream_hash = get_stream_hash(
         hex_stream_name, key, hex_suggested_file_name, blobs
-    )
+    ).encode()
     if calculated_stream_hash != stream_hash:
         raise InvalidStreamDescriptorError("Stream hash does not match stream metadata")
     return True
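Note: the added .encode() matters because Python 3 lets str and bytes compare unequal silently rather than raising, so without it the hash check would always fail:

    calculated = 'ab' * 48          # hex digest computed as str
    stored = b'ab' * 48             # stream hash read back as bytes
    assert calculated != stored     # py3: always unequal, no error raised
    assert calculated.encode() == stored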
@@ -332,6 +332,7 @@ class WalletComponent(Component):
         storage = self.component_manager.get_component(DATABASE_COMPONENT)
         lbryschema.BLOCKCHAIN_NAME = conf.settings['blockchain_name']
         self.wallet = LbryWalletManager.from_old_config(conf.settings)
+        self.wallet.old_db = storage
         yield self.wallet.start()

     @defer.inlineCallbacks
@@ -49,7 +49,7 @@ class Publisher(object):

         # check if we have a file already for this claim (if this is a publish update with a new stream)
         old_stream_hashes = yield self.storage.get_old_stream_hashes_for_claim_id(
-            tx.get_claim_id(0).decode(), self.lbry_file.stream_hash
+            tx.get_claim_id(0), self.lbry_file.stream_hash.decode()
         )
         if old_stream_hashes:
             for lbry_file in filter(lambda l: l.stream_hash in old_stream_hashes,
@@ -58,7 +58,7 @@ class Publisher(object):
                 log.info("Removed old stream for claim update: %s", lbry_file.stream_hash)

         yield self.storage.save_content_claim(
-            self.lbry_file.stream_hash, get_certificate_lookup(tx, 0).decode()
+            self.lbry_file.stream_hash.decode(), get_certificate_lookup(tx, 0)
         )
         defer.returnValue(tx)

@@ -70,7 +70,7 @@ class Publisher(object):
         )
         if stream_hash:  # the stream_hash returned from the db will be None if this isn't a stream we have
             yield self.storage.save_content_claim(
-                stream_hash, get_certificate_lookup(tx, 0).decode()
+                stream_hash.decode(), get_certificate_lookup(tx, 0)
             )
             self.lbry_file = [f for f in self.lbry_file_manager.lbry_files if f.stream_hash == stream_hash][0]
         defer.returnValue(tx)
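Note: the common thread in these Publisher hunks is moving the decode to the other side of the call: the new wallet code deals in bytes while the old storage layer expects str, so each value is decoded exactly once, at the storage boundary. A trivial illustration of the convention:

    stream_hash = b'ab' * 48            # wallet/file code works in bytes
    storage_key = stream_hash.decode()  # str handed to the old db layer
    assert isinstance(storage_key, str)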
@@ -2,6 +2,7 @@ import logging
 import os
 import sqlite3
 import traceback
+from binascii import hexlify, unhexlify
 from decimal import Decimal
 from twisted.internet import defer, task, threads
 from twisted.enterprise import adbapi
@@ -613,7 +614,7 @@ class SQLiteStorage(WalletDatabase):
             source_hash = None
         except AttributeError:
             source_hash = None
-        serialized = claim_info.get('hex') or smart_decode(claim_info['value']).serialized.encode('hex')
+        serialized = claim_info.get('hex') or hexlify(smart_decode(claim_info['value']).serialized)
         transaction.execute(
             "insert or replace into claim values (?, ?, ?, ?, ?, ?, ?, ?, ?)",
             (outpoint, claim_id, name, amount, height, serialized, certificate_id, address, sequence)
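Note: the 'hex' codec spellings (bytes.encode('hex') / str.decode('hex')) are Python 2 only and fail outright under Python 3; binascii.hexlify and unhexlify, now imported at the top of this file, work on both. For example:

    from binascii import hexlify, unhexlify

    raw = b'\xde\xad\xbe\xef'
    hexed = hexlify(raw)            # b'deadbeef' -- note: bytes, not str
    assert unhexlify(hexed) == raw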
@@ -671,12 +672,15 @@ class SQLiteStorage(WalletDatabase):
         ).fetchone()
         if not claim_info:
             raise Exception("claim not found")
-        new_claim_id, claim = claim_info[0], ClaimDict.deserialize(claim_info[1].decode('hex'))
+        new_claim_id, claim = claim_info[0], ClaimDict.deserialize(unhexlify(claim_info[1]))

         # certificate claims should not be in the content_claim table
         if not claim.is_stream:
             raise Exception("claim does not contain a stream")

+        if not isinstance(stream_hash, bytes):
+            stream_hash = stream_hash.encode()
+
         # get the known sd hash for this stream
         known_sd_hash = transaction.execute(
             "select sd_hash from stream where stream_hash=?", (stream_hash,)
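Note: the isinstance guard before the stream query matters because sqlite3 binds str parameters as TEXT and bytes as BLOB, and SQLite never compares the two as equal, so a str stream_hash would silently match nothing. A runnable demonstration:

    import sqlite3

    db = sqlite3.connect(':memory:')
    db.execute("create table stream (stream_hash blob, sd_hash text)")
    db.execute("insert into stream values (?, ?)", (b'abc', 'def'))
    print(db.execute("select sd_hash from stream where stream_hash=?",
                     ('abc',)).fetchone())   # None: TEXT never equals BLOB
    print(db.execute("select sd_hash from stream where stream_hash=?",
                     (b'abc',)).fetchone())  # ('def',)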
@@ -2,6 +2,7 @@
 Utilities for turning plain files into LBRY Files.
 """

+import six
 import binascii
 import logging
 import os
@@ -44,7 +45,7 @@ class EncryptedFileStreamCreator(CryptStreamCreator):
         # generate the sd info
         self.sd_info = format_sd_info(
             EncryptedFileStreamType, hexlify(self.name), hexlify(self.key),
-            hexlify(self.name), self.stream_hash, self.blob_infos
+            hexlify(self.name), self.stream_hash.encode(), self.blob_infos
         )

         # sanity check
@@ -125,14 +126,14 @@ def create_lbry_file(blob_manager, storage, payment_rate_manager, lbry_file_mana
         )
         log.debug("adding to the file manager")
         lbry_file = yield lbry_file_manager.add_published_file(
-            sd_info['stream_hash'], sd_hash, binascii.hexlify(file_directory), payment_rate_manager,
+            sd_info['stream_hash'], sd_hash, binascii.hexlify(file_directory.encode()), payment_rate_manager,
             payment_rate_manager.min_blob_data_payment_rate
         )
         defer.returnValue(lbry_file)


 def hexlify(str_or_unicode):
-    if isinstance(str_or_unicode, unicode):
+    if isinstance(str_or_unicode, six.text_type):
         strng = str_or_unicode.encode('utf-8')
     else:
         strng = str_or_unicode
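Note: six.text_type is unicode on Python 2 and str on Python 3, which lets this local hexlify accept either text or bytes on both interpreters. The hunk cuts off before the return; a self-contained sketch, assuming the function finishes by hex-encoding via binascii:

    import binascii
    import six

    def hexlify(str_or_unicode):
        if isinstance(str_or_unicode, six.text_type):
            strng = str_or_unicode.encode('utf-8')
        else:
            strng = str_or_unicode
        return binascii.hexlify(strng)  # assumed tail, truncated in the hunk

    assert hexlify(u'foo') == hexlify(b'foo') == b'666f6f'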
@@ -16,8 +16,8 @@ class EncryptedFileReflectorClient(Protocol):
     # Protocol stuff
     def connectionMade(self):
         log.debug("Connected to reflector")
-        self.response_buff = ''
-        self.outgoing_buff = ''
+        self.response_buff = b''
+        self.outgoing_buff = b''
         self.blob_hashes_to_send = []
         self.failed_blob_hashes = []
         self.next_blob_to_send = None
@@ -50,7 +50,7 @@ class EncryptedFileReflectorClient(Protocol):
         except IncompleteResponse:
             pass
         else:
-            self.response_buff = ''
+            self.response_buff = b''
             d = self.handle_response(msg)
             d.addCallback(lambda _: self.send_next_request())
             d.addErrback(self.response_failure_handler)
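Note: Twisted delivers dataReceived bytes under Python 3, so the accumulation buffer must start, and be reset, as b''; mixing in '' would raise TypeError on the first +=. A minimal sketch of the buffering pattern these two hunks preserve, assuming JSON responses can arrive fragmented:

    import json

    response_buff = b''

    def data_received(data):
        global response_buff
        response_buff += data                 # bytes += bytes
        try:
            msg = json.loads(response_buff)   # accepts bytes since py3.6
        except ValueError:                    # incomplete message: keep buffering
            return None
        response_buff = b''                   # reset with bytes, not ''
        return msg

    assert data_received(b'{"vers') is None
    assert data_received(b'ion": 1}') == {'version': 1}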
@@ -143,7 +143,7 @@ class EncryptedFileReflectorClient(Protocol):
         return d

     def send_request(self, request_dict):
-        self.write(json.dumps(request_dict))
+        self.write(json.dumps(request_dict).encode())

     def send_handshake(self):
         self.send_request({'version': self.protocol_version})
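Note: json.dumps returns str, while a Twisted transport's write() requires bytes, hence the added .encode(). For example:

    import json

    payload = json.dumps({'version': 1})
    assert isinstance(payload, str)            # not writable to a transport as-is
    assert payload.encode() == b'{"version": 1}'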
@@ -1,5 +1,6 @@
 import os
 import json
+from binascii import hexlify
 from twisted.internet import defer

 from torba.manager import WalletManager as BaseWalletManager
@@ -160,9 +161,25 @@ class LbryWalletManager(BaseWalletManager):
         )
         tx = yield Transaction.claim(name.encode(), claim, amount, claim_address, [account], account)
         yield account.ledger.broadcast(tx)
+        yield self.old_db.save_claims([self._old_get_temp_claim_info(
+            tx, tx.outputs[0], claim_address, claim_dict, name, amount
+        )])
         # TODO: release reserved tx outputs in case anything fails by this point
         defer.returnValue(tx)

+    def _old_get_temp_claim_info(self, tx, txo, address, claim_dict, name, bid):
+        return {
+            "claim_id": hexlify(tx.get_claim_id(txo.index)).decode(),
+            "name": name,
+            "amount": bid,
+            "address": address.decode(),
+            "txid": tx.hex_id.decode(),
+            "nout": txo.index,
+            "value": claim_dict,
+            "height": -1,
+            "claim_sequence": -1,
+        }
+
     @defer.inlineCallbacks
     def claim_new_channel(self, channel_name, amount):
         try:
@@ -1,4 +1,7 @@
+import six
 import tempfile
+from types import SimpleNamespace
+from binascii import hexlify

 from twisted.internet import defer
 from orchstr8.testcase import IntegrationTestCase, d2f
@@ -25,14 +28,49 @@ class FakeAnalytics:
         pass


+class FakeBlob:
+    def __init__(self):
+        self.data = []
+        self.blob_hash = 'abc'
+        self.length = 3
+
+    def write(self, data):
+        self.data.append(data)
+
+    def close(self):
+        if self.data:
+            return defer.succeed(hexlify(b'a'*48))
+        return defer.succeed(None)
+
+    def get_is_verified(self):
+        return True
+
+    def open_for_reading(self):
+        return six.StringIO('foo')
+
+
 class FakeBlobManager:
     def get_blob_creator(self):
-        return None
+        return FakeBlob()
+
+    def creator_finished(self, blob_info, should_announce):
+        pass
+
+    def get_blob(self, sd_hash):
+        return FakeBlob()


 class FakeSession:
-    storage = None
     blob_manager = FakeBlobManager()
+    peer_finder = None
+    rate_limiter = None
+
+    @property
+    def payment_rate_manager(self):
+        obj = SimpleNamespace()
+        obj.min_blob_data_payment_rate = 1
+        return obj


 class CommandTestCase(IntegrationTestCase):
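Note: in the FakeBlob above, six.StringIO resolves to io.StringIO under Python 3, so open_for_reading() hands back an in-memory text stream:

    import io
    import six

    assert six.StringIO is io.StringIO       # true on py3
    assert six.StringIO('foo').read() == 'foo'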
@@ -68,22 +106,27 @@ class CommandTestCase(IntegrationTestCase):
         self.daemon.wallet = self.manager
         self.daemon.component_manager.components.add(wallet_component)

+        storage_component = DatabaseComponent(self.daemon.component_manager)
+        await d2f(storage_component.start())
+        self.daemon.storage = storage_component.storage
+        self.daemon.wallet.old_db = self.daemon.storage
+        self.daemon.component_manager.components.add(storage_component)
+
         session_component = SessionComponent(self.daemon.component_manager)
         session_component.session = FakeSession()
         session_component._running = True
         self.daemon.session = session_component.session
+        self.daemon.session.storage = self.daemon.storage
+        self.daemon.session.wallet = self.daemon.wallet
+        self.daemon.session.blob_manager.storage = self.daemon.storage
         self.daemon.component_manager.components.add(session_component)

         file_manager = FileManager(self.daemon.component_manager)
         file_manager.file_manager = EncryptedFileManager(session_component.session, True)
         file_manager._running = True
+        self.daemon.file_manager = file_manager.file_manager
         self.daemon.component_manager.components.add(file_manager)

-        storage_component = DatabaseComponent(self.daemon.component_manager)
-        await d2f(storage_component.start())
-        self.daemon.storage = storage_component.storage
-        self.daemon.component_manager.components.add(storage_component)
-

 class ChannelNewCommandTests(CommandTestCase):