hash -> sd_hash, encoding fixes, update to new protobufs
parent cd266477fb
commit 5f8482bdb7
11 changed files with 2907 additions and 114 deletions
@@ -2118,7 +2118,7 @@ class Daemon(metaclass=JSONRPCServerType):
 )
 claim = Claim()
-claim.stream.update(file_path=file_path, hash='0'*96, **kwargs)
+claim.stream.update(file_path=file_path, sd_hash='0'*96, **kwargs)
 tx = await Transaction.claim_create(
 name, claim, amount, claim_address, [account], account, channel
 )

@@ -2126,7 +2126,7 @@ class Daemon(metaclass=JSONRPCServerType):
 if not preview:
 file_stream = await self.stream_manager.create_stream(file_path)
-claim.stream.hash = file_stream.sd_hash
+claim.stream.sd_hash = file_stream.sd_hash
 new_txo.script.generate()
 if channel:
 new_txo.sign(channel)

@@ -2135,7 +2135,7 @@ class Daemon(metaclass=JSONRPCServerType):
 await self.storage.save_claims([self._old_get_temp_claim_info(
 tx, new_txo, claim_address, claim, name, dewies_to_lbc(amount)
 )])
-stream_hash = await self.storage.get_stream_hash_for_sd_hash(claim.stream.hash)
+stream_hash = await self.storage.get_stream_hash_for_sd_hash(claim.stream.sd_hash)
 if stream_hash:
 await self.storage.save_content_claim(stream_hash, new_txo.id)
 await self.analytics_manager.send_claim_action('publish')
@@ -2246,7 +2246,7 @@ class Daemon(metaclass=JSONRPCServerType):
 if not preview:
 if file_path is not None:
 file_stream = await self.stream_manager.create_stream(file_path)
-new_txo.claim.stream.hash = file_stream.sd_hash
+new_txo.claim.stream.sd_hash = file_stream.sd_hash
 new_txo.script.generate()
 if channel:
 new_txo.sign(channel)

@@ -2255,7 +2255,7 @@ class Daemon(metaclass=JSONRPCServerType):
 await self.storage.save_claims([self._old_get_temp_claim_info(
 tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
 )])
-stream_hash = await self.storage.get_stream_hash_for_sd_hash(new_txo.claim.stream.hash)
+stream_hash = await self.storage.get_stream_hash_for_sd_hash(new_txo.claim.stream.sd_hash)
 if stream_hash:
 await self.storage.save_content_claim(stream_hash, new_txo.id)
 await self.analytics_manager.send_claim_action('publish')

@@ -3002,8 +3002,11 @@ class Daemon(metaclass=JSONRPCServerType):
 raise Exception(f"Couldn't find private key for {key} '{value}'. ")
 return channels[0]
 elif len(channels) > 1:
-raise ValueError(f"Multiple channels found with {key} '{value}', pass a channel_id to narrow it down.")
-raise ValueError(f"Couldn't find channel with {key} '{value}'.")
+raise ValueError(
+f"Multiple channels found with channel_{key} '{value}', "
+f"pass a channel_id to narrow it down."
+)
+raise ValueError(f"Couldn't find channel with channel_{key} '{value}'.")

 def get_account_or_default(self, account_id: str, argument_name: str = "account", lbc_only=True) -> LBCAccount:
 if account_id is None:
@@ -27,7 +27,11 @@ class JSONResponseEncoder(JSONEncoder):
 if isinstance(obj, Claim):
 claim_dict = obj.to_dict()
 if obj.is_stream:
-claim_dict['stream']['hash'] = obj.stream.hash
+claim_dict['stream']['sd_hash'] = obj.stream.sd_hash
+if 'fee' in claim_dict['stream'] and 'address' in claim_dict['stream']['fee']:
+claim_dict['stream']['fee']['address'] = obj.stream.fee.address
+elif obj.is_channel:
+claim_dict['channel']['public_key'] = obj.channel.public_key
 return claim_dict
 if isinstance(obj, datetime):
 return obj.strftime("%Y%m%dT%H:%M:%S")
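
Note on the encoder change above: protobuf's MessageToDict (used by to_dict() in the schema, see the BaseClaimSubType hunk below) renders bytes fields as base64, so the encoder overwrites 'sd_hash' with its hex form and the fee 'address' with its Base58 form. A minimal, self-contained illustration of the difference (the 48-byte value is a made-up placeholder):

    import base64
    import binascii

    sd_hash_bytes = bytes.fromhex('ab' * 48)          # stand-in 48-byte stream descriptor hash
    print(base64.b64encode(sd_hash_bytes).decode())   # how MessageToDict renders a bytes field
    print(binascii.hexlify(sd_hash_bytes).decode())   # the hex string the encoder writes back as 'sd_hash'
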
@@ -591,7 +591,7 @@ class SQLiteStorage(SQLiteMixin):
 sequence = claim_info['claim_sequence']
 certificate_id = claim_info['value'].signing_channel_id
 try:
-source_hash = claim_info['value'].stream.hash
+source_hash = claim_info['value'].stream.sd_hash
 except (AttributeError, ValueError):
 source_hash = None
 serialized = binascii.hexlify(claim_info['value'].to_bytes())

@@ -671,7 +671,7 @@ class SQLiteStorage(SQLiteMixin):
 if not known_sd_hash:
 raise Exception("stream not found")
 # check the claim contains the same sd hash
-if known_sd_hash[0] != claim.stream.hash:
+if known_sd_hash[0] != claim.stream.sd_hash:
 raise Exception("stream mismatch")

 # if there is a current claim associated to the file, check that the new claim is an update to it
@@ -12,10 +12,15 @@ from hachoir.core.log import log as hachoir_log
 from torba.client.hash import Base58
 from torba.client.constants import COIN

-from lbrynet.schema.types.v2.claim_pb2 import Claim as ClaimMessage, Fee as FeeMessage
 from lbrynet.schema import compat
 from lbrynet.schema.base import Signable
 from lbrynet.schema.mime_types import guess_media_type
+from lbrynet.schema.types.v2.claim_pb2 import (
+Claim as ClaimMessage,
+Fee as FeeMessage,
+Location as LocationMessage,
+Language as LanguageMessage
+)


 hachoir_log.use_print = False
@@ -297,11 +302,37 @@ class BaseClaimSubType:

 @property
 def language(self) -> str:
-return self.message.language
+if len(self.languages) > 0:
+return LanguageMessage.Language.Name(self.languages[0].language)

 @language.setter
 def language(self, language: str):
-self.message.language = language
+value = LanguageMessage.Language.Value(language)
+if len(self.languages) > 0:
+self.languages[0].language = value
+else:
+self.languages.add().language = value
+
+@property
+def languages(self):
+return self.message.languages
+
+@property
+def location_country(self) -> str:
+if len(self.locations) > 0:
+return LocationMessage.Country.Name(self.locations[0].country)
+
+@location_country.setter
+def location_country(self, country: str):
+value = LocationMessage.Country.Value(country)
+if len(self.locations) > 0:
+self.locations[0].location = value
+else:
+self.locations.add().location = value
+
+@property
+def locations(self):
+return self.message.locations

 def to_dict(self):
 return MessageToDict(self.message, preserving_proto_field_name=True)
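
With the new protobufs a claim carries a repeated `languages` field; the `language` property above stays as a convenience accessor for the first entry, mapping strings through the Language enum. A short usage sketch (the `from lbrynet.schema.claim import Claim` import path is assumed from context):

    from lbrynet.schema.claim import Claim  # import path assumed

    claim = Claim()
    claim.stream.language = 'en'      # goes through LanguageMessage.Language.Value('en')
    print(claim.stream.language)      # 'en' -- name of the first entry in `languages`
    print(claim.stream.to_dict())     # renders the repeated field, e.g. {'languages': [{'language': 'en'}], ...}
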
@@ -429,20 +460,20 @@ class Stream(BaseClaimSubType):
 raise Exception(f'Unknown currency type: {fee_currency}')

 @property
-def hash(self) -> str:
-return hexlify(self.message.hash).decode()
+def sd_hash(self) -> str:
+return hexlify(self.message.sd_hash).decode()

-@hash.setter
-def hash(self, sd_hash: str):
-self.message.hash = unhexlify(sd_hash.encode())
+@sd_hash.setter
+def sd_hash(self, sd_hash: str):
+self.message.sd_hash = unhexlify(sd_hash.encode())

 @property
-def hash_bytes(self) -> bytes:
-return self.message.hash
+def sd_hash_bytes(self) -> bytes:
+return self.message.sd_hash

-@hash_bytes.setter
-def hash_bytes(self, hash: bytes):
-self.message.hash = hash
+@sd_hash_bytes.setter
+def sd_hash_bytes(self, sd_hash: bytes):
+self.message.sd_hash = sd_hash

 @property
 def author(self) -> str:
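
The renamed accessors expose the stream descriptor hash both as a hex string and as raw bytes; a quick sketch (Claim import path assumed, as above):

    from lbrynet.schema.claim import Claim  # import path assumed

    claim = Claim()
    claim.stream.sd_hash = '0' * 96                    # hex string setter, unhexlified onto the message
    assert claim.stream.sd_hash == '0' * 96            # hex string getter
    assert claim.stream.sd_hash_bytes == b'\x00' * 48  # raw bytes stored on the protobuf field
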
@@ -17,8 +17,15 @@ def from_old_json_schema(claim, payload: bytes):
 stream.author = value.get('author', '')
 stream.license = value.get('license', '')
 stream.license_url = value.get('license_url', '')
-stream.language = value.get('language', '')
-stream.hash = value['sources']['lbry_sd_hash']
+language = value.get('language', '')
+if language:
+if language.lower() == 'english':
+language = 'en'
+try:
+stream.language = language
+except:
+pass
+stream.sd_hash = value['sources']['lbry_sd_hash']
 if value.get('nsfw', False):
 stream.tags.append('mature')
 if "fee" in value:
@@ -45,9 +52,10 @@ def from_types_v1(claim, payload: bytes):
 stream.license = old.stream.metadata.license
 stream.license_url = old.stream.metadata.licenseUrl
 stream.thumbnail_url = old.stream.metadata.thumbnail
-stream.language = MetadataMessage.Language.Name(old.stream.metadata.language)
+if old.stream.metadata.HasField('language'):
+stream.languages.add().language = old.stream.metadata.language
 stream.media_type = old.stream.source.contentType
-stream.hash_bytes = old.stream.source.source
+stream.sd_hash_bytes = old.stream.source.source
 if old.stream.metadata.nsfw:
 stream.tags.append('mature')
 if old.stream.metadata.HasField('fee'):

File diff suppressed because one or more lines are too long
@@ -353,7 +353,7 @@ class StreamManager:
 if existing:
 await self.start_stream(existing[0])
 return existing[0], None
-existing = self.get_filtered_streams(sd_hash=claim.stream.hash)
+existing = self.get_filtered_streams(sd_hash=claim.stream.sd_hash)
 if existing and existing[0].claim_id != claim_id:
 raise ResolveError(f"stream for {existing[0].claim_id} collides with existing "
 f"download {claim_id}")

@@ -437,7 +437,7 @@ class StreamManager:

 # download the stream
 download_id = binascii.hexlify(generate_id()).decode()
-downloader = StreamDownloader(self.loop, self.config, self.blob_manager, claim.stream.hash,
+downloader = StreamDownloader(self.loop, self.config, self.blob_manager, claim.stream.sd_hash,
 self.config.download_dir, file_name)

 stream = None

@@ -484,7 +484,7 @@ class StreamManager:
 None if not stream else len(stream.downloader.blob_downloader.scores),
 False if not downloader else downloader.added_fixed_peers,
 self.config.fixed_peer_delay if not downloader else downloader.fixed_peers_delay,
-claim.stream.hash, time_to_descriptor,
+claim.stream.sd_hash, time_to_descriptor,
 None if not (stream and stream.descriptor) else stream.descriptor.blobs[0].blob_hash,
 None if not (stream and stream.descriptor) else stream.descriptor.blobs[0].length,
 time_to_first_bytes, None if not error else error.__class__.__name__
@@ -116,7 +116,7 @@ def get_sd_hash(stream_info):
 if not stream_info:
 return None
 if isinstance(stream_info, Claim):
-return stream_info.stream.hash
+return stream_info.stream.sd_hash
 result = stream_info.get('claim', {}).\
 get('value', {}).\
 get('stream', {}).\
@@ -1,9 +1,7 @@
 import hashlib
 import tempfile
 import logging
-from binascii import unhexlify

-import base64
 import ecdsa

 from lbrynet.wallet.transaction import Transaction, Output
@@ -80,13 +78,16 @@ class ChannelCommands(CommandTestCase):
 'homepage_url': "https://co.ol",
 'cover_url': "https://co.ol/cover.png",
 }
+fixed_values = values.copy()
+del fixed_values['language']
+fixed_values['languages'] = [{'language': 'en'}]

 # create new channel with all fields set
 tx = await self.out(self.channel_create('@bigchannel', **values))
 txo = tx['outputs'][0]
 self.assertEqual(
 txo['value']['channel'],
-{'public_key': txo['value']['channel']['public_key'], **values}
+{'public_key': txo['value']['channel']['public_key'], **fixed_values}
 )

 # create channel with nothing set
@@ -110,20 +111,20 @@ class ChannelCommands(CommandTestCase):
 # update channel setting all fields
 tx = await self.out(self.channel_update(claim_id, **values))
 txo = tx['outputs'][0]
-values['public_key'] = public_key
-values['tags'].insert(0, 'blah') # existing tag
+fixed_values['public_key'] = public_key
+fixed_values['tags'].insert(0, 'blah') # existing tag
 self.assertEqual(
 txo['value']['channel'],
-values
+fixed_values
 )

 # clearing and settings tags
 tx = await self.out(self.channel_update(claim_id, tags='single', clear_tags=True))
 txo = tx['outputs'][0]
-values['tags'] = ['single']
+fixed_values['tags'] = ['single']
 self.assertEqual(
 txo['value']['channel'],
-values
+fixed_values
 )

 # reset signing key
@@ -131,7 +132,7 @@ class ChannelCommands(CommandTestCase):
 txo = tx['outputs'][0]
 self.assertNotEqual(
 txo['value']['channel']['public_key'],
-values['public_key']
+fixed_values['public_key']
 )

 # send channel to someone else
@@ -278,18 +279,20 @@ class StreamCommands(CommandTestCase):
 'video_width': 800,
 'video_height': 600
 }
+fixed_values = values.copy()
+del fixed_values['language']
+fixed_values['languages'] = [{'language': 'en'}]

 # create new channel with all fields set
 tx = await self.out(self.stream_create('big', **values))
 txo = tx['outputs'][0]
 stream = txo['value']['stream']
-fixed_values = values.copy()
-fixed_values['hash'] = stream['hash']
+fixed_values['sd_hash'] = stream['sd_hash']
 fixed_values['file'] = stream['file']
 fixed_values['media_type'] = 'application/octet-stream'
 fixed_values['release_time'] = str(values['release_time'])
 fixed_values['fee'] = {
-'address': base64.b64encode(Base58.decode(fixed_values.pop('fee_address'))).decode(),
+'address': fixed_values.pop('fee_address'),
 'amount': fixed_values.pop('fee_amount').replace('.', ''),
 'currency': fixed_values.pop('fee_currency').upper()
 }
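
The fee expectation changes because the API now returns the Base58 claim address itself instead of the base64 rendering of its decoded bytes (see the JSONResponseEncoder hunk above). A small before/after sketch (the address is a made-up placeholder; Base58 comes from torba.client.hash, as imported in the schema module):

    import base64
    from torba.client.hash import Base58

    address = 'bYFeMtSL7ARuG1iMpjFyrnTe4oJHSAVNXF'                     # hypothetical claim address
    old_rendering = base64.b64encode(Base58.decode(address)).decode()  # previous, raw-bytes-as-base64 form
    new_rendering = address                                            # new form: the Base58 address as-is
    print(old_rendering, new_rendering)
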
@@ -306,7 +309,7 @@ class StreamCommands(CommandTestCase):
 txo['value']['stream'], {
 'file': {'size': '3'},
 'media_type': 'application/octet-stream',
-'hash': txo['value']['stream']['hash']
+'sd_hash': txo['value']['stream']['sd_hash']
 }
 )

@@ -314,12 +317,12 @@ class StreamCommands(CommandTestCase):
 tx = await self.out(self.stream_create('updated', tags='blah'))
 txo = tx['outputs'][0]
 claim_id = txo['claim_id']
-fixed_values['hash'] = txo['value']['stream']['hash']
+fixed_values['sd_hash'] = txo['value']['stream']['sd_hash']
 self.assertEqual(
 txo['value']['stream'], {
 'file': {'size': '3'},
 'media_type': 'application/octet-stream',
-'hash': fixed_values['hash'],
+'sd_hash': fixed_values['sd_hash'],
 'tags': ['blah']
 }
 )
@@ -20,11 +20,11 @@ class TestOldJSONSchemaCompatibility(TestCase):
 self.assertEqual(stream.description, '10MB test file to measure download speed on Lbry p2p-network.')
 self.assertEqual(stream.license, 'None')
 self.assertEqual(stream.author, 'root')
-self.assertEqual(stream.language, 'English')
+self.assertEqual(stream.language, 'en')
 self.assertEqual(stream.media_type, 'application/octet-stream')
 self.assertEqual(stream.thumbnail_url, '/home/robert/lbry/speed.jpg')
 self.assertEqual(
-stream.hash,
+stream.sd_hash,
 'bbd1f68374ff9a1044a90d7dd578ce41979211c386caf19e'
 '6f496536db5f2c96b58fe2c7a6677b331419a117873b539f'
 )
@@ -54,7 +54,7 @@ class TestOldJSONSchemaCompatibility(TestCase):
 self.assertEqual(stream.language, 'en')
 self.assertEqual(stream.media_type, 'application/x-msdownload')
 self.assertEqual(
-stream.hash,
+stream.sd_hash,
 'c5ffee0fa5168e16681b519d9d85446e8d1d818a616bd555'
 '40aa7374d2321b51abf2ac3dae1443a03dadcc8f7affaa62'
 )
@@ -80,7 +80,7 @@ class TestOldJSONSchemaCompatibility(TestCase):
 self.assertEqual(stream.language, 'en')
 self.assertEqual(stream.media_type, 'video/mp4')
 self.assertEqual(
-stream.hash,
+stream.sd_hash,
 'd83db664c6d7d570aa824300f4869e0bfb560e765efa477a'
 'ebf566467f8d3a57f4f8c704cab1308eb75ff8b7e84e3caf'
 )
@@ -124,7 +124,7 @@ class TestTypesV1Compatibility(TestCase):
 self.assertEqual(stream.media_type, 'video/mp4')
 self.assertEqual(stream.thumbnail_url, 'https://berk.ninja/thumbnails/FrTdBCOS_fc')
 self.assertEqual(
-stream.hash,
+stream.sd_hash,
 '040e8ac6e89c061f982528c23ad33829fd7146435bf7a4cc'
 '22f0bff70c4fe0b91fd36da9a375e3e1c171db825bf5d1f3'
 )
@@ -160,7 +160,7 @@ class TestTypesV1Compatibility(TestCase):
 self.assertEqual(stream.language, 'en')
 self.assertEqual(stream.media_type, 'application/x-zip-compressed')
 self.assertEqual(
-stream.hash,
+stream.sd_hash,
 '1f41eb0312aa7e8a5ce49349bc77d811da975833719d7515'
 '23b19f123fc3d528d6a94e3446ccddb7b9329f27a9cad7e3'
 )
@@ -64,7 +64,7 @@ def get_mock_wallet(sd_hash, storage, balance=10.0, fee=None):
 claim_obj.stream.fee.usd = Decimal(fee['amount'])
 claim_obj.stream.title = "33rpm"
 claim_obj.stream.language = "en"
-claim_obj.stream.hash = sd_hash
+claim_obj.stream.sd_hash = sd_hash
 claim_obj.stream.media_type = "image/png"
 claim['value'] = claim_obj
 claim['hex'] = binascii.hexlify(claim_obj.to_bytes()).decode()