forked from LBRYCommunity/lbry-sdk

protobuf resolve response

parent 8a3b960a85 · commit ee3db31541
9 changed files with 75 additions and 65 deletions
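
Taken together, the hunks below add a protobuf-encoded resolve path next to the existing dict-based one: API.resolve gains a protobuf flag, the service and Database grow protobuf_resolve methods, and the query layer serializes results with ResultOutput.to_bytes. A minimal sketch of the caller-facing difference, assuming an api instance of the API class changed below (fetch_resolved is a hypothetical helper, written only for illustration):

    # Hedged sketch: exercise both resolve paths introduced by this commit.
    async def fetch_resolved(api, url: str):
        as_dict = await api.resolve(url)                  # existing path: {url: Output}
        as_bytes = await api.resolve(url, protobuf=True)  # new path: protobuf-encoded bytes
        return as_dict, as_bytes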
@@ -276,8 +276,11 @@ class Database:
     async def search_supports(self, **constraints) -> Result[Output]:
         return await self.fetch_result(q.search_supports, **constraints)

-    async def resolve(self, *urls) -> Dict[str, Output]:
-        return await self.run(q.resolve, *urls)
+    async def resolve(self, urls, **kwargs) -> Dict[str, Output]:
+        return await self.run(q.resolve, urls, **kwargs)
+
+    async def protobuf_resolve(self, urls, **kwargs) -> bytes:
+        return await self.run(q.protobuf_resolve, urls, **kwargs)

     async def get_txo_sum(self, **constraints) -> int:
         return await self.run(q.get_txo_sum, **constraints)
@@ -4,6 +4,7 @@ from operator import itemgetter
 from typing import List, Dict

 from lbry.schema.url import URL
+from lbry.schema.result import Outputs as ResultOutput
 from lbry.error import ResolveCensoredError
 from lbry.blockchain.transaction import Output

@@ -35,7 +36,11 @@ def _get_referenced_rows(txo_rows: List[dict], censor_channels: List[bytes]):
     return channel_txos + reposted_txos


-def resolve(*urls) -> Dict[str, Output]:
+def protobuf_resolve(urls, **kwargs) -> bytes:
+    return ResultOutput.to_bytes([resolve_url(raw_url) for raw_url in urls], [])
+
+
+def resolve(urls, **kwargs) -> Dict[str, Output]:
     return {url: resolve_url(url) for url in urls}
     #txo_rows = [resolve_url(raw_url) for raw_url in urls]
     #extra_txo_rows = _get_referenced_rows(
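
At the query layer, protobuf_resolve serializes each resolved Output (or the exception raised for a URL) with ResultOutput.to_bytes(...), passing an empty list of referenced rows. A rough decoding sketch, under the assumption that Outputs in lbry.schema.result exposes a from_bytes counterpart to to_bytes (an assumption; verify against the schema module):

    from lbry.schema.result import Outputs  # same class aliased as ResultOutput above

    def decode_resolve_response(data: bytes):
        # from_bytes() is assumed to be the inverse of to_bytes(); if only
        # from_base64() exists, adapt accordingly.
        return Outputs.from_bytes(data)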
@@ -63,6 +63,7 @@ BASE_SELECT_CLAIM_COLUMNS = BASE_SELECT_TXO_COLUMNS + [
     Claim.c.creation_height,
     Claim.c.is_controlling,
     Claim.c.channel_hash,
+    Claim.c.reposted_count,
     Claim.c.reposted_claim_hash,
     Claim.c.short_url,
     Claim.c.signed_claim_count,
@@ -372,6 +372,7 @@ META_ATTRS = (
     'activation_height', 'takeover_height', 'creation_height', 'staked_amount',
     'short_url', 'canonical_url', 'staked_support_amount', 'staked_support_count',
     'signed_claim_count', 'signed_support_count', 'is_signature_valid',
+    'reposted_count',
 )

@@ -181,6 +181,7 @@ Claim = Table(

     # reposts
     Column('reposted_claim_hash', LargeBinary, nullable=True),
+    Column('reposted_count', Integer, server_default='0'),

     # claims which are channels
     Column('signed_claim_count', Integer, server_default='0'),
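
The three hunks above thread a new reposted_count through the stack: a column on the Claim table, a selected column in BASE_SELECT_CLAIM_COLUMNS, and an entry in META_ATTRS so the value lands in Output.meta. A small sketch of where it surfaces, mirroring the encoder change in the next hunk (nothing here is new API, just the access pattern):

    # Once 'reposted_count' is in META_ATTRS, the query layer copies it into
    # txo.meta and the protobuf encoder reads it as txo_message.claim.reposted.
    def reposts_for(txo) -> int:
        return txo.meta['reposted_count']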
@@ -185,26 +185,26 @@ class Outputs:
                 txo_message.error.code = ErrorMessage.BLOCKED
                 set_reference(txo_message.error.blocked.channel, txo.censor_hash, extra_txo_rows)
             return
-        txo_message.tx_hash = txo['txo_hash'][:32]
-        txo_message.nout, = struct.unpack('<I', txo['txo_hash'][32:])
-        txo_message.height = txo['height']
-        txo_message.claim.short_url = txo['short_url']
-        txo_message.claim.reposted = txo['reposted']
-        if txo['canonical_url'] is not None:
-            txo_message.claim.canonical_url = txo['canonical_url']
-        txo_message.claim.is_controlling = bool(txo['is_controlling'])
-        if txo['last_take_over_height'] is not None:
-            txo_message.claim.take_over_height = txo['last_take_over_height']
-        txo_message.claim.creation_height = txo['creation_height']
-        txo_message.claim.activation_height = txo['activation_height']
-        txo_message.claim.expiration_height = txo['expiration_height']
-        if txo['claims_in_channel'] is not None:
-            txo_message.claim.claims_in_channel = txo['claims_in_channel']
-        txo_message.claim.effective_amount = txo['effective_amount']
-        txo_message.claim.support_amount = txo['support_amount']
-        txo_message.claim.trending_group = txo['trending_group']
-        txo_message.claim.trending_mixed = txo['trending_mixed']
-        txo_message.claim.trending_local = txo['trending_local']
-        txo_message.claim.trending_global = txo['trending_global']
-        set_reference(txo_message.claim.channel, txo['channel_hash'], extra_txo_rows)
-        set_reference(txo_message.claim.repost, txo['reposted_claim_hash'], extra_txo_rows)
+        txo_message.tx_hash = txo.tx_ref.hash
+        txo_message.nout = txo.position
+        txo_message.height = txo.tx_ref.height
+        txo_message.claim.short_url = txo.meta['short_url']
+        txo_message.claim.reposted = txo.meta['reposted_count']
+        if txo.meta['canonical_url'] is not None:
+            txo_message.claim.canonical_url = txo.meta['canonical_url']
+        txo_message.claim.is_controlling = bool(txo.meta['takeover_height'])
+        #if txo['last_take_over_height'] is not None:
+        #    txo_message.claim.take_over_height = txo['last_take_over_height']
+        txo_message.claim.creation_height = txo.meta['creation_height']
+        txo_message.claim.activation_height = txo.meta['activation_height']
+        #txo_message.claim.expiration_height = txo['expiration_height']
+        if txo.meta['signed_claim_count'] is not None:
+            txo_message.claim.claims_in_channel = txo.meta['signed_claim_count']
+        txo_message.claim.effective_amount = txo.meta['staked_amount']
+        txo_message.claim.support_amount = txo.meta['staked_support_amount']
+        #txo_message.claim.trending_group = txo['trending_group']
+        #txo_message.claim.trending_mixed = txo['trending_mixed']
+        #txo_message.claim.trending_local = txo['trending_local']
+        #txo_message.claim.trending_global = txo['trending_global']
+        #set_reference(txo_message.claim.channel, txo['channel_hash'], extra_txo_rows)
+        #set_reference(txo_message.claim.repost, txo['reposted_claim_hash'], extra_txo_rows)
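
The encoder now receives real Output objects rather than raw result rows: transaction identity comes from txo.tx_ref and txo.position, claim metadata from txo.meta, and fields the new query layer does not yet supply (take-over height, expiration, trending, channel/repost references) are commented out rather than guessed. The switched-to access pattern in one place, using only attributes that appear in the hunk:

    # Hedged summary of the new access pattern (no new API assumed).
    def describe(txo) -> dict:
        return {
            'tx_hash': txo.tx_ref.hash,
            'nout': txo.position,
            'height': txo.tx_ref.height,
            'short_url': txo.meta['short_url'],
            'reposted': txo.meta['reposted_count'],
            'staked_amount': txo.meta['staked_amount'],
        }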
@@ -502,8 +502,9 @@ class API:
                                          # of supports you've made to this claim
         include_sent_tips=False,         # lookup and sum the total amount
                                          # of tips you've made to this claim
-        include_received_tips=False      # lookup and sum the total amount
+        include_received_tips=False,     # lookup and sum the total amount
                                          # of tips you've received to this claim
+        protobuf=False,                  # protobuf encoded result
     ) -> dict:  # resolve results, keyed by url
         """
         Get the claim that a URL refers to.
@@ -515,6 +516,7 @@ class API:
             [--include_sent_supports]
             [--include_sent_tips]
             [--include_received_tips]
+            [--protobuf]

         Returns:
             '<url>': {
@@ -573,6 +575,8 @@ class API:
         """
         if isinstance(urls, str):
             urls = [urls]
+        if protobuf:
+            return await self.service.protobuf_resolve(urls)
         return await self.service.resolve(
             urls, wallet=None,#self.wallets.get_or_default(wallet_id),
             include_purchase_receipt=include_purchase_receipt,
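
Note that the new branch returns before any of the include_* options are consulted, so a protobuf response is always the bare encoded blob; string input is still normalized to a list first. A tiny illustration, assuming api as in the earlier sketch:

    # The include_* flags only affect the dict path; protobuf=True returns early.
    async def resolve_raw(api, url: str) -> bytes:
        return await api.resolve(url, protobuf=True)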
@@ -1968,53 +1972,45 @@ class API:
         {kwargs}

         """
+        stream_dict, kwargs = pop_kwargs('stream', stream_kwargs(**stream_and_tx_kwargs))
+        tx_dict, kwargs = pop_kwargs('tx', tx_kwargs(**kwargs))
+        assert_consumed_kwargs(kwargs)
         self.ledger.valid_stream_name_or_error(name)
-        wallet = self.wallets.get_or_default_for_spending(wallet_id)
+        wallet = self.wallets.get_or_default_for_spending(tx_dict.pop('wallet_id'))
         amount = self.ledger.get_dewies_or_error('bid', bid, positive_value=True)
-        holding_account = wallet.accounts.get_or_default(account_id)
-        funding_accounts = wallet.accounts.get_or_all(fund_account_id)
-        channel = await wallet.channels.get_for_signing_or_none(claim_id=channel_id, claim_name=channel_name)
-        holding_address = await holding_account.get_valid_receiving_address(claim_address)
-        kwargs['fee_address'] = self.ledger.get_fee_address(kwargs, claim_address)
+        holding_account = wallet.accounts.get_or_default(stream_dict.pop('account_id'))
+        funding_accounts = wallet.accounts.get_or_all(tx_dict.pop('fund_account_id'))
+        signing_channel = None
+        if 'channel_id' in stream_dict or 'channel_name' in stream_dict:
+            signing_channel = await wallet.channels.get_for_signing_or_none(
+                channel_id=stream_dict.pop('channel_id', None),
+                channel_name=stream_dict.pop('channel_name', None)
+            )
+        holding_address = await holding_account.get_valid_receiving_address(
+            stream_dict.pop('claim_address', None)
+        )
+        kwargs['fee_address'] = self.ledger.get_fee_address(kwargs, holding_address)

         await wallet.verify_duplicate(name, allow_duplicate_name)

+        stream_dict.pop('validate_file')
+        stream_dict.pop('optimize_file')
         # TODO: fix
         #file_path, spec = await self._video_file_analyzer.verify_or_repair(
         #    validate_file, optimize_file, file_path, ignore_non_video=True
         #)
         #kwargs.update(spec)
-        tx = await wallet.stream.create(
-            name,
-        )
-        claim = Claim()
-        claim.stream.update(file_path=file_path, sd_hash='0' * 96, **kwargs)
-        tx = await wallet.streams.create(
-            name, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel
-        )
-        new_txo = tx.outputs[0]
-
-        file_stream = None
-        if not preview:
-            file_stream = await self.stream_manager.create_stream(file_path)
-            claim.stream.source.sd_hash = file_stream.sd_hash
-            new_txo.script.generate()
-
-        if channel:
-            new_txo.sign(channel)
-        await tx.sign(funding_accounts)
-
-        if not preview:
-            await self.broadcast_or_release(tx, blocking)
-            await self.storage.save_claims([self._old_get_temp_claim_info(
-                tx, new_txo, claim_address, claim, name, dewies_to_lbc(amount)
-            )])
-            await self.storage.save_content_claim(file_stream.stream_hash, new_txo.id)
-            self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
-        else:
-            await account.ledger.release_tx(tx)
+        class FakeManagedStream:
+            sd_hash = 'beef'
+        async def create_file_stream(path):
+            return FakeManagedStream()
+        tx, fs = await wallet.streams.create(
+            name=name, amount=amount, file_path=stream_dict.pop('file_path'),
+            create_file_stream=create_file_stream,
+            holding_address=holding_address, funding_accounts=funding_accounts,
+            signing_channel=signing_channel, **remove_nulls(stream_dict)
         )
+        await self.service.maybe_broadcast_or_release(tx, tx_dict['preview'], tx_dict['no_wait'])

         return tx

     async def stream_update(
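
The stream_create refactor routes options through kwargs dictionaries: judging by how they are unpacked here, pop_kwargs splits the combined stream_and_tx_kwargs into a stream dict and a tx dict plus the remainder, assert_consumed_kwargs guards against leftovers, and remove_nulls(stream_dict) drops unset options before the call into wallet.streams.create. A rough stand-in for that last helper, inferred from usage only (the real implementation lives elsewhere in the repo and may differ):

    # Hedged stand-in, not the repo's remove_nulls: drop options left unset so
    # wallet.streams.create only sees explicitly provided stream kwargs.
    def remove_nulls_sketch(d: dict) -> dict:
        return {k: v for k, v in d.items() if v is not None}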
@@ -55,4 +55,7 @@ class FullNode(Service):
         pass

     async def resolve(self, urls, **kwargs):
-        return await self.db.resolve(*urls)
+        return await self.db.resolve(urls, **kwargs)
+
+    async def protobuf_resolve(self, urls, **kwargs):
+        return await self.db.protobuf_resolve(urls, **kwargs)
@@ -187,7 +187,7 @@ class JSONResponseEncoder(JSONEncoder):
         if isinstance(obj, Decimal):
             return float(obj)
         if isinstance(obj, bytes):
-            return obj.decode()
+            return hexlify(obj).decode()
         return super().default(obj)

     def encode_transaction(self, tx):
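
With this change, binary fields such as claim hashes are rendered as hex in JSON responses instead of being UTF-8 decoded, which would raise on most hashes. A minimal standard-library illustration (hexlify comes from binascii, matching the call in the hunk):

    from binascii import hexlify

    raw = bytes.fromhex('ee3db31541')
    # old behaviour: raw.decode() raises UnicodeDecodeError for bytes like these
    # new behaviour:
    assert hexlify(raw).decode() == 'ee3db31541'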