forked from LBRYCommunity/lbry-sdk
full reposted_claim data returned for claim_search and resolve
parent 423b48866f
commit fd632392d4
11 changed files with 210 additions and 152 deletions
@@ -2061,15 +2061,17 @@ class Daemon(metaclass=JSONRPCServerType):
"""
@requires(WALLET_COMPONENT)
def jsonrpc_claim_list(self, account_id=None, wallet_id=None, page=None, page_size=None):
def jsonrpc_claim_list(self, claim_type=None, account_id=None, wallet_id=None, page=None, page_size=None):
"""
List my stream and channel claims.
Usage:
claim_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
claim_list [--claim_type=<claim_type>]
[--account_id=<account_id>] [--wallet_id=<wallet_id>]
[--page=<page>] [--page_size=<page_size>]
Options:
--claim_type=<claim_type> : (str) claim type: channel, stream, repost, collection
--account_id=<account_id> : (str) id of the account to query
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
--page=<page> : (int) page to return during paginating
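Illustrative usage of the new claim_type filter (not part of the diff; written in the style of the CommandTestCase helpers changed further down):

# hypothetical test method, assuming the helpers shown later in this diff
async def test_claim_list_by_type(self):
    tx = await self.stream_create('original', '1.0')
    await self.stream_repost(self.get_claim_id(tx), 'original-repost', '0.1')
    reposts = (await self.out(self.daemon.jsonrpc_claim_list(claim_type='repost')))['items']
    self.assertTrue(all(claim['value_type'] == 'repost' for claim in reposts))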
@@ -2085,7 +2087,7 @@ class Daemon(metaclass=JSONRPCServerType):
else:
claims = partial(self.ledger.get_claims, wallet=wallet, accounts=wallet.accounts)
claim_count = partial(self.ledger.get_claim_count, wallet=wallet, accounts=wallet.accounts)
return paginate_rows(claims, claim_count, page, page_size)
return paginate_rows(claims, claim_count, page, page_size, claim_type=claim_type)
@requires(WALLET_COMPONENT)
async def jsonrpc_claim_search(self, **kwargs):
@@ -2110,6 +2112,7 @@ class Daemon(metaclass=JSONRPCServerType):
[--support_amount=<support_amount>] [--trending_group=<trending_group>]
[--trending_mixed=<trending_mixed>] [--trending_local=<trending_local>]
[--trending_global=<trending_global>]
[--reposted_claim_id=<reposted_claim_id>] [--reposted=<reposted>]
[--claim_type=<claim_type>] [--stream_types=<stream_types>...] [--media_types=<media_types>...]
[--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>]
[--any_tags=<any_tags>...] [--all_tags=<all_tags>...] [--not_tags=<not_tags>...]
@@ -2190,6 +2193,9 @@ class Daemon(metaclass=JSONRPCServerType):
--trending_global=<trending_global>: (int) trending value calculated relative to all
trending content globally (supports
equality constraints)
--reposted_claim_id=<reposted_claim_id>: (str) all reposts of the specified original claim id
--reposted=<reposted> : (int) claims reposted this many times (supports
equality constraints)
--claim_type=<claim_type> : (str) filter by 'channel', 'stream' or 'unknown'
--stream_types=<stream_types> : (list) filter by 'video', 'image', 'document', etc
--media_types=<media_types> : (list) filter by 'video/mp4', 'image/png', etc
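A sketch of how the two new search constraints compose (illustrative only, in the style of the integration test at the end of this diff):

# hypothetical test-style calls exercising the new constraints
reposts = await self.claim_search(reposted_claim_id=claim_id)   # every repost of one original claim
popular = await self.claim_search(reposted='>=2')               # claims reposted at least twice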
@@ -2345,8 +2351,9 @@ class Daemon(metaclass=JSONRPCServerType):
txo = tx.outputs[0]
txo.generate_channel_private_key()
if not preview:
await tx.sign(funding_accounts)
if not preview:
account.add_channel_private_key(txo.private_key)
wallet.save()
await self.broadcast_or_release(tx, blocking)
@@ -2500,8 +2507,9 @@ class Daemon(metaclass=JSONRPCServerType):
new_txo.script.generate()
if not preview:
await tx.sign(funding_accounts)
if not preview:
account.add_channel_private_key(new_txo.private_key)
wallet.save()
await self.broadcast_or_release(tx, blocking)
@@ -2818,8 +2826,7 @@ class Daemon(metaclass=JSONRPCServerType):
@requires(WALLET_COMPONENT, STREAM_MANAGER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT)
async def jsonrpc_stream_repost(self, name, bid, claim_id, allow_duplicate_name=False, channel_id=None,
channel_name=None, channel_account_id=None, account_id=None, wallet_id=None,
claim_address=None, funding_account_ids=None, preview=False, blocking=False,
**kwargs):
claim_address=None, funding_account_ids=None, preview=False, blocking=False):
"""
Creates a claim that references an existing stream by its claim id.
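A rough sketch of calling the repost RPC directly, following the stream_repost helper added to CommandTestCase further down (the daemon instance, name, bid and channel are illustrative assumptions):

# hypothetical direct daemon call; mirrors CommandTestCase.stream_repost below
tx = await daemon.jsonrpc_stream_repost(
    name='newstuff-again', bid='1.1', claim_id=original_claim_id,
    channel_name='@reposting-goodies'
)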
@@ -2875,16 +2882,13 @@ class Daemon(metaclass=JSONRPCServerType):
)
new_txo = tx.outputs[0]
if not preview:
new_txo.script.generate()
if channel:
new_txo.sign(channel)
await tx.sign(funding_accounts)
if not preview:
await self.broadcast_or_release(tx, blocking)
# await self.analytics_manager.send_claim_action('publish') todo: what to send?
await self.analytics_manager.send_claim_action('publish')
else:
await account.ledger.release_tx(tx)
@@ -3459,6 +3463,7 @@ class Daemon(metaclass=JSONRPCServerType):
if channel:
new_txo.sign(channel)
await tx.sign(funding_accounts)
if not preview:
await self.broadcast_or_release(tx, blocking)
await self.analytics_manager.send_claim_action('publish')
@@ -28,14 +28,16 @@ def encode_txo_doc():
'confirmations': "number of confirmed blocks",
'is_change': "payment to change address, only available when it can be determined",
'is_mine': "payment to one of your accounts, only available when it can be determined",
'type': "one of 'claim', 'support' or 'payment'",
'type': "one of 'claim', 'support' or 'purchase'",
'name': "when type is 'claim' or 'support', this is the claim name",
'claim_id': "when type is 'claim' or 'support', this is the claim id",
'claim_id': "when type is 'claim', 'support' or 'purchase', this is the claim id",
'claim_op': "when type is 'claim', this determines if it is 'create' or 'update'",
'value': "when type is 'claim' or 'support' with payload, this is the decoded protobuf payload",
'value_type': "determines the type of the 'value' field: 'channel', 'stream', etc",
'protobuf': "hex encoded raw protobuf version of 'value' field",
'permanent_url': "when type is 'claim' or 'support', this is the long permanent claim URL",
'claim': "for purchase outputs only, metadata of purchased claim",
'reposted_claim': "for repost claims only, metadata of claim being reposted",
'signing_channel': "for signed claims only, metadata of signing channel",
'is_channel_signature_valid': "for signed claims only, whether signature is valid",
'purchase_receipt': "metadata for the purchase transaction associated with this claim"
@@ -203,6 +205,8 @@ class JSONResponseEncoder(JSONEncoder):
output['canonical_url'] = output['meta'].pop('canonical_url')
if txo.claims is not None:
output['claims'] = [self.encode_output(o) for o in txo.claims]
if txo.reposted_claim is not None:
output['reposted_claim'] = self.encode_output(txo.reposted_claim)
if txo.script.is_claim_name or txo.script.is_update_claim:
try:
output['value'] = txo.claim
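For orientation only: with the encoder change above, a repost returned by claim_search or resolve carries the full original claim inline. The keys below are a subset, taken from the assertions in the integration test at the end of this diff:

# hypothetical shape of one encoded repost output after this change
example_repost_output = {
    'name': 'repost-on-channel',
    'value_type': 'repost',
    'signing_channel': {'name': '@reposting-goodies'},
    'reposted_claim': {
        'name': 'newstuff',
        'meta': {'reposted': 2},
        'signing_channel': {'name': '@goodies'},
    },
}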
@@ -1,6 +1,6 @@
import base64
import struct
from typing import List
from typing import List, Optional, Tuple
from binascii import hexlify
from itertools import chain
@@ -33,6 +33,7 @@ class Outputs:
txo.meta = {
'short_url': f'lbry://{claim.short_url}',
'canonical_url': f'lbry://{claim.canonical_url or claim.short_url}',
'reposted': claim.reposted,
'is_controlling': claim.is_controlling,
'take_over_height': claim.take_over_height,
'creation_height': claim.creation_height,
@@ -47,6 +48,8 @@ class Outputs:
}
if claim.HasField('channel'):
txo.channel = tx_map[claim.channel.tx_hash].outputs[claim.channel.nout]
if claim.HasField('repost'):
txo.reposted_claim = tx_map[claim.repost.tx_hash].outputs[claim.repost.nout]
try:
if txo.claim.is_channel:
txo.meta['claims_in_channel'] = claim.claims_in_channel
@@ -80,13 +83,13 @@ class Outputs:
if total is not None:
page.total = total
for row in txo_rows:
cls.row_to_message(row, page.txos.add())
cls.row_to_message(row, page.txos.add(), extra_txo_rows)
for row in extra_txo_rows:
cls.row_to_message(row, page.extra_txos.add())
cls.row_to_message(row, page.extra_txos.add(), extra_txo_rows)
return page.SerializeToString()
@classmethod
def row_to_message(cls, txo, txo_message):
def row_to_message(cls, txo, txo_message, extra_txo_rows):
if isinstance(txo, Exception):
txo_message.error.text = txo.args[0]
if isinstance(txo, ValueError):
@@ -98,6 +101,7 @@ class Outputs:
txo_message.nout, = struct.unpack('<I', txo['txo_hash'][32:])
txo_message.height = txo['height']
txo_message.claim.short_url = txo['short_url']
txo_message.claim.reposted = txo['reposted']
if txo['canonical_url'] is not None:
txo_message.claim.canonical_url = txo['canonical_url']
txo_message.claim.is_controlling = bool(txo['is_controlling'])
@@ -114,8 +118,16 @@ class Outputs:
txo_message.claim.trending_mixed = txo['trending_mixed']
txo_message.claim.trending_local = txo['trending_local']
txo_message.claim.trending_global = txo['trending_global']
if txo['channel_txo_hash']:
channel = txo_message.claim.channel
channel.tx_hash = txo['channel_txo_hash'][:32]
channel.nout, = struct.unpack('<I', txo['channel_txo_hash'][32:])
channel.height = txo['channel_height']
cls.set_reference(txo_message, 'channel', txo['channel_hash'], extra_txo_rows)
cls.set_reference(txo_message, 'repost', txo['reposted_claim_hash'], extra_txo_rows)
@staticmethod
def set_reference(message, attr, claim_hash, rows):
if claim_hash:
for txo in rows:
if claim_hash == txo['claim_hash']:
reference = getattr(message.claim, attr)
reference.tx_hash = txo['txo_hash'][:32]
reference.nout = struct.unpack('<I', txo['txo_hash'][32:])[0]
reference.height = txo['height']
break
@@ -19,7 +19,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
package='pb',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x0cresult.proto\x12\x02pb\"b\n\x07Outputs\x12\x18\n\x04txos\x18\x01 \x03(\x0b\x32\n.pb.Output\x12\x1e\n\nextra_txos\x18\x02 \x03(\x0b\x32\n.pb.Output\x12\r\n\x05total\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\"{\n\x06Output\x12\x0f\n\x07tx_hash\x18\x01 \x01(\x0c\x12\x0c\n\x04nout\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x1e\n\x05\x63laim\x18\x07 \x01(\x0b\x32\r.pb.ClaimMetaH\x00\x12\x1a\n\x05\x65rror\x18\x0f \x01(\x0b\x32\t.pb.ErrorH\x00\x42\x06\n\x04meta\"\x81\x03\n\tClaimMeta\x12\x1b\n\x07\x63hannel\x18\x01 \x01(\x0b\x32\n.pb.Output\x12\x11\n\tshort_url\x18\x02 \x01(\t\x12\x15\n\rcanonical_url\x18\x03 \x01(\t\x12\x16\n\x0eis_controlling\x18\x04 \x01(\x08\x12\x18\n\x10take_over_height\x18\x05 \x01(\r\x12\x17\n\x0f\x63reation_height\x18\x06 \x01(\r\x12\x19\n\x11\x61\x63tivation_height\x18\x07 \x01(\r\x12\x19\n\x11\x65xpiration_height\x18\x08 \x01(\r\x12\x19\n\x11\x63laims_in_channel\x18\t \x01(\r\x12\x18\n\x10\x65\x66\x66\x65\x63tive_amount\x18\n \x01(\x04\x12\x16\n\x0esupport_amount\x18\x0b \x01(\x04\x12\x16\n\x0etrending_group\x18\x0c \x01(\r\x12\x16\n\x0etrending_mixed\x18\r \x01(\x02\x12\x16\n\x0etrending_local\x18\x0e \x01(\x02\x12\x17\n\x0ftrending_global\x18\x0f \x01(\x02\"i\n\x05\x45rror\x12\x1c\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x0e.pb.Error.Code\x12\x0c\n\x04text\x18\x02 \x01(\t\"4\n\x04\x43ode\x12\x10\n\x0cUNKNOWN_CODE\x10\x00\x12\r\n\tNOT_FOUND\x10\x01\x12\x0b\n\x07INVALID\x10\x02\x62\x06proto3')
serialized_pb=_b('\n\x0cresult.proto\x12\x02pb\"b\n\x07Outputs\x12\x18\n\x04txos\x18\x01 \x03(\x0b\x32\n.pb.Output\x12\x1e\n\nextra_txos\x18\x02 \x03(\x0b\x32\n.pb.Output\x12\r\n\x05total\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\"{\n\x06Output\x12\x0f\n\x07tx_hash\x18\x01 \x01(\x0c\x12\x0c\n\x04nout\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x1e\n\x05\x63laim\x18\x07 \x01(\x0b\x32\r.pb.ClaimMetaH\x00\x12\x1a\n\x05\x65rror\x18\x0f \x01(\x0b\x32\t.pb.ErrorH\x00\x42\x06\n\x04meta\"\xaf\x03\n\tClaimMeta\x12\x1b\n\x07\x63hannel\x18\x01 \x01(\x0b\x32\n.pb.Output\x12\x1a\n\x06repost\x18\x02 \x01(\x0b\x32\n.pb.Output\x12\x11\n\tshort_url\x18\x03 \x01(\t\x12\x15\n\rcanonical_url\x18\x04 \x01(\t\x12\x16\n\x0eis_controlling\x18\x05 \x01(\x08\x12\x18\n\x10take_over_height\x18\x06 \x01(\r\x12\x17\n\x0f\x63reation_height\x18\x07 \x01(\r\x12\x19\n\x11\x61\x63tivation_height\x18\x08 \x01(\r\x12\x19\n\x11\x65xpiration_height\x18\t \x01(\r\x12\x19\n\x11\x63laims_in_channel\x18\n \x01(\r\x12\x10\n\x08reposted\x18\x0b \x01(\r\x12\x18\n\x10\x65\x66\x66\x65\x63tive_amount\x18\x14 \x01(\x04\x12\x16\n\x0esupport_amount\x18\x15 \x01(\x04\x12\x16\n\x0etrending_group\x18\x16 \x01(\r\x12\x16\n\x0etrending_mixed\x18\x17 \x01(\x02\x12\x16\n\x0etrending_local\x18\x18 \x01(\x02\x12\x17\n\x0ftrending_global\x18\x19 \x01(\x02\"i\n\x05\x45rror\x12\x1c\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x0e.pb.Error.Code\x12\x0c\n\x04text\x18\x02 \x01(\t\"4\n\x04\x43ode\x12\x10\n\x0cUNKNOWN_CODE\x10\x00\x12\r\n\tNOT_FOUND\x10\x01\x12\x0b\n\x07INVALID\x10\x02\x62\x06proto3')
)
@@ -45,8 +45,8 @@ _ERROR_CODE = _descriptor.EnumDescriptor(
],
containing_type=None,
serialized_options=None,
serialized_start=686,
serialized_end=738,
serialized_start=732,
serialized_end=784,
)
_sym_db.RegisterEnumDescriptor(_ERROR_CODE)
@@ -180,99 +180,113 @@ _CLAIMMETA = _descriptor.Descriptor(
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='short_url', full_name='pb.ClaimMeta.short_url', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
name='repost', full_name='pb.ClaimMeta.repost', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='canonical_url', full_name='pb.ClaimMeta.canonical_url', index=2,
name='short_url', full_name='pb.ClaimMeta.short_url', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_controlling', full_name='pb.ClaimMeta.is_controlling', index=3,
number=4, type=8, cpp_type=7, label=1,
name='canonical_url', full_name='pb.ClaimMeta.canonical_url', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_controlling', full_name='pb.ClaimMeta.is_controlling', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='take_over_height', full_name='pb.ClaimMeta.take_over_height', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creation_height', full_name='pb.ClaimMeta.creation_height', index=5,
name='take_over_height', full_name='pb.ClaimMeta.take_over_height', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='activation_height', full_name='pb.ClaimMeta.activation_height', index=6,
name='creation_height', full_name='pb.ClaimMeta.creation_height', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='expiration_height', full_name='pb.ClaimMeta.expiration_height', index=7,
name='activation_height', full_name='pb.ClaimMeta.activation_height', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='claims_in_channel', full_name='pb.ClaimMeta.claims_in_channel', index=8,
name='expiration_height', full_name='pb.ClaimMeta.expiration_height', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='effective_amount', full_name='pb.ClaimMeta.effective_amount', index=9,
number=10, type=4, cpp_type=4, label=1,
name='claims_in_channel', full_name='pb.ClaimMeta.claims_in_channel', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='support_amount', full_name='pb.ClaimMeta.support_amount', index=10,
number=11, type=4, cpp_type=4, label=1,
name='reposted', full_name='pb.ClaimMeta.reposted', index=10,
number=11, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_group', full_name='pb.ClaimMeta.trending_group', index=11,
number=12, type=13, cpp_type=3, label=1,
name='effective_amount', full_name='pb.ClaimMeta.effective_amount', index=11,
number=20, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_mixed', full_name='pb.ClaimMeta.trending_mixed', index=12,
number=13, type=2, cpp_type=6, label=1,
name='support_amount', full_name='pb.ClaimMeta.support_amount', index=12,
number=21, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_group', full_name='pb.ClaimMeta.trending_group', index=13,
number=22, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_mixed', full_name='pb.ClaimMeta.trending_mixed', index=14,
number=23, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_local', full_name='pb.ClaimMeta.trending_local', index=13,
number=14, type=2, cpp_type=6, label=1,
name='trending_local', full_name='pb.ClaimMeta.trending_local', index=15,
number=24, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_global', full_name='pb.ClaimMeta.trending_global', index=14,
number=15, type=2, cpp_type=6, label=1,
name='trending_global', full_name='pb.ClaimMeta.trending_global', index=16,
number=25, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
@@ -290,7 +304,7 @@ _CLAIMMETA = _descriptor.Descriptor(
oneofs=[
],
serialized_start=246,
serialized_end=631,
serialized_end=677,
)
@@ -328,8 +342,8 @@ _ERROR = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
serialized_start=633,
serialized_end=738,
serialized_start=679,
serialized_end=784,
)
_OUTPUTS.fields_by_name['txos'].message_type = _OUTPUT
@@ -343,6 +357,7 @@ _OUTPUT.oneofs_by_name['meta'].fields.append(
_OUTPUT.fields_by_name['error'])
_OUTPUT.fields_by_name['error'].containing_oneof = _OUTPUT.oneofs_by_name['meta']
_CLAIMMETA.fields_by_name['channel'].message_type = _OUTPUT
_CLAIMMETA.fields_by_name['repost'].message_type = _OUTPUT
_ERROR.fields_by_name['code'].enum_type = _ERROR_CODE
_ERROR_CODE.containing_type = _ERROR
DESCRIPTOR.message_types_by_name['Outputs'] = _OUTPUTS
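For orientation (not part of the diff): the regenerated descriptors above correspond to a repost sub-message and a reposted counter on ClaimMeta. A consumer-side sketch, assuming the generated module is importable at the path used below:

# hypothetical wire-level read of the new ClaimMeta fields
from lbry.schema.types.v2.result_pb2 import Outputs  # import path is an assumption

page = Outputs()
page.ParseFromString(serialized_search_response)  # bytes from a claim_search RPC
for txo in page.txos:
    if txo.claim.HasField('repost'):              # reference added in this commit
        print('repost, original reposted', txo.claim.reposted, 'times')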
@@ -204,12 +204,6 @@ class CommandTestCase(IntegrationTestCase):
""" Synchronous version of `out` method. """
return json.loads(jsonrpc_dumps_pretty(value, ledger=self.ledger))['result']
def stream_repost(self, claim_id, name='repost', bid='1.0', confirm=True, **kwargs):
return self.confirm_and_render(
self.daemon.jsonrpc_stream_repost(claim_id=claim_id, name=name, bid=bid, **kwargs),
confirm
)
async def confirm_and_render(self, awaitable, confirm) -> Transaction:
tx = await awaitable
if confirm:
@@ -245,6 +239,11 @@ class CommandTestCase(IntegrationTestCase):
self.daemon.jsonrpc_stream_update(claim_id, **kwargs), confirm
)
def stream_repost(self, claim_id, name='repost', bid='1.0', confirm=True, **kwargs):
return self.confirm_and_render(
self.daemon.jsonrpc_stream_repost(claim_id=claim_id, name=name, bid=bid, **kwargs), confirm
)
async def stream_abandon(self, *args, confirm=True, **kwargs):
if 'blocking' not in kwargs:
kwargs['blocking'] = False
@@ -307,6 +306,9 @@ class CommandTestCase(IntegrationTestCase):
async def file_list(self, *args, **kwargs):
return (await self.out(self.daemon.jsonrpc_file_list(*args, **kwargs)))['items']
async def claim_list(self, *args, **kwargs):
return (await self.out(self.daemon.jsonrpc_claim_list(*args, **kwargs)))['items']
@staticmethod
def get_claim_id(tx):
return tx['outputs'][0]['claim_id']
@@ -3,11 +3,13 @@ TXO_TYPES = {
"channel": 2,
"support": 3,
"purchase": 4,
"collection": 5
"collection": 5,
"repost": 6,
}
CLAIM_TYPES = [
TXO_TYPES['stream'],
TXO_TYPES['channel'],
TXO_TYPES['collection'],
TXO_TYPES['repost'],
]
@@ -140,6 +140,10 @@ class WalletDatabase(BaseDatabase):
@staticmethod
def constrain_claims(constraints):
claim_type = constraints.pop('claim_type', None)
if claim_type is not None:
constraints['txo_type'] = TXO_TYPES[claim_type]
else:
constraints['txo_type__in'] = CLAIM_TYPES
async def get_claims(self, **constraints) -> List[Output]:
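A quick walkthrough of the constraint rewrite above, using the TXO_TYPES/CLAIM_TYPES values from the constants hunk earlier (the stream value is assumed to be 1, defined just above the lines shown there):

# hypothetical walkthrough of WalletDatabase.constrain_claims
constraints = {'claim_type': 'repost'}
WalletDatabase.constrain_claims(constraints)
assert constraints == {'txo_type': 6}                 # TXO_TYPES['repost']

constraints = {}
WalletDatabase.constrain_claims(constraints)
assert constraints == {'txo_type__in': [1, 2, 5, 6]}  # CLAIM_TYPES, assuming stream == 1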
@@ -39,7 +39,7 @@ ATTRIBUTE_ARRAY_MAX_LENGTH = 100
INTEGER_PARAMS = {
'height', 'creation_height', 'activation_height', 'expiration_height',
'timestamp', 'creation_timestamp', 'release_time', 'fee_amount',
'tx_position', 'channel_join',
'tx_position', 'channel_join', 'reposted',
'amount', 'effective_amount', 'support_amount',
'trending_group', 'trending_mixed',
'trending_local', 'trending_global',
@@ -267,12 +267,13 @@ def _get_claims(cols, for_count=False, **constraints) -> Tuple[str, Dict]:
sqlite3.Binary(unhexlify(channel_id)[::-1]) for channel_id in blocklist_ids
]
constraints.update({
f'$blocking_channels{i}': a for i, a in enumerate(blocking_channels)
f'$blocking_channel{i}': a for i, a in enumerate(blocking_channels)
})
blocklist = ', '.join([f':$blocking_channels{i}' for i in range(len(blocking_channels))])
constraints['claim.claim_hash__not_in'] = f"""
SELECT reposted_claim_hash FROM claim
WHERE channel_hash IN ({blocklist})
blocklist = ', '.join([
f':$blocking_channel{i}' for i in range(len(blocking_channels))
])
constraints['claim.claim_hash__not_in#blocklist_channel_ids'] = f"""
SELECT reposted_claim_hash FROM claim WHERE channel_hash IN ({blocklist})
"""
if 'signature_valid' in constraints:
has_channel_signature = constraints.pop('has_channel_signature', False)
@@ -319,14 +320,9 @@ def _get_claims(cols, for_count=False, **constraints) -> Tuple[str, Dict]:
select = f"SELECT {cols} FROM search JOIN claim ON (search.rowid=claim.rowid)"
else:
select = f"SELECT {cols} FROM claim"
sql, values = query(
select if for_count else select+"""
LEFT JOIN claimtrie USING (claim_hash)
LEFT JOIN claim as channel ON (claim.channel_hash=channel.claim_hash)
""", **constraints
)
return sql, values
if not for_count:
select += " LEFT JOIN claimtrie USING (claim_hash)"
return query(select, **constraints)
def get_claims(cols, for_count=False, **constraints) -> List:
@@ -350,6 +346,46 @@ def get_claims_count(**constraints) -> int:
return count[0][0]
def _search(**constraints):
return get_claims(
"""
claimtrie.claim_hash as is_controlling,
claimtrie.last_take_over_height,
claim.claim_hash, claim.txo_hash,
claim.claims_in_channel, claim.reposted,
claim.height, claim.creation_height,
claim.activation_height, claim.expiration_height,
claim.effective_amount, claim.support_amount,
claim.trending_group, claim.trending_mixed,
claim.trending_local, claim.trending_global,
claim.short_url, claim.canonical_url,
claim.channel_hash, claim.reposted_claim_hash,
claim.signature_valid
""", **constraints
)
def _get_referenced_rows(txo_rows: List[sqlite3.Row]):
repost_hashes = set(filter(None, map(itemgetter('reposted_claim_hash'), txo_rows)))
channel_hashes = set(filter(None, map(itemgetter('channel_hash'), txo_rows)))
reposted_txos = []
if repost_hashes:
reposted_txos = _search(
**{'claim.claim_hash__in': [sqlite3.Binary(h) for h in repost_hashes]}
)
channel_hashes |= set(filter(None, map(itemgetter('channel_hash'), reposted_txos)))
channel_txos = []
if channel_hashes:
channel_txos = _search(
**{'claim.claim_hash__in': [sqlite3.Binary(h) for h in channel_hashes]}
)
# channels must come first for client side inflation to work properly
return channel_txos + reposted_txos
@measure
def search(constraints) -> Tuple[List, List, int, int]:
assert set(constraints).issubset(SEARCH_PARAMS), \
@@ -362,49 +398,15 @@ def search(constraints) -> Tuple[List, List, int, int]:
if 'order_by' not in constraints:
constraints['order_by'] = ["claim_hash"]
txo_rows = _search(**constraints)
channel_hashes = set(filter(None, map(itemgetter('channel_hash'), txo_rows)))
extra_txo_rows = []
if channel_hashes:
extra_txo_rows = _search(
**{'claim.claim_hash__in': [sqlite3.Binary(h) for h in channel_hashes]}
)
extra_txo_rows = _get_referenced_rows(txo_rows)
return txo_rows, extra_txo_rows, constraints['offset'], total
def _search(**constraints):
return get_claims(
"""
claimtrie.claim_hash as is_controlling,
claimtrie.last_take_over_height,
claim.claim_hash, claim.txo_hash,
claim.claims_in_channel,
claim.height, claim.creation_height,
claim.activation_height, claim.expiration_height,
claim.effective_amount, claim.support_amount,
claim.trending_group, claim.trending_mixed,
claim.trending_local, claim.trending_global,
claim.short_url, claim.canonical_url, claim.reposted_claim_hash,
claim.channel_hash, channel.txo_hash AS channel_txo_hash,
channel.height AS channel_height, claim.signature_valid
""", **constraints
)
@measure
def resolve(urls) -> Tuple[List, List]:
result = []
channel_hashes = set()
for raw_url in urls:
match = resolve_url(raw_url)
result.append(match)
if isinstance(match, sqlite3.Row) and match['channel_hash']:
channel_hashes.add(match['channel_hash'])
extra_txo_rows = []
if channel_hashes:
extra_txo_rows = _search(
**{'claim.claim_hash__in': [sqlite3.Binary(h) for h in channel_hashes]}
)
return result, extra_txo_rows
txo_rows = [resolve_url(raw_url) for raw_url in urls]
extra_txo_rows = _get_referenced_rows([r for r in txo_rows if isinstance(r, sqlite3.Row)])
return txo_rows, extra_txo_rows
@measure
@@ -55,6 +55,7 @@ class SQLDB:
description text,
claim_type integer,
reposted integer default 0,
-- streams
stream_type text,
@@ -385,6 +386,21 @@ class SQLDB:
'support', {'txo_hash__in': [sqlite3.Binary(txo_hash) for txo_hash in txo_hashes]}
))
def calculate_reposts(self, claims: List[Output]):
targets = set()
for claim in claims:
if claim.claim.is_repost:
targets.add((claim.claim.repost.reference.claim_hash,))
if targets:
self.db.executemany(
"""
UPDATE claim SET reposted = (
SELECT count(*) FROM claim AS repost WHERE repost.reposted_claim_hash = claim.claim_hash
)
WHERE claim_hash = ?
""", targets
)
def validate_channel_signatures(self, height, new_claims, updated_claims, spent_claims, affected_channels, timer):
if not new_claims and not updated_claims and not spent_claims:
return
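For clarity (illustrative, not part of the diff): executemany binds each 1-tuple in targets to the single ? placeholder, so per reposted claim the statement reduces to:

# equivalent single-row form of the UPDATE issued by calculate_reposts, assuming
# `db` is the same sqlite3 connection and `reposted_claim_hash` holds the raw hash bytes
db.execute(
    "UPDATE claim SET reposted = ("
    "  SELECT count(*) FROM claim AS repost"
    "  WHERE repost.reposted_claim_hash = claim.claim_hash"
    ") WHERE claim_hash = ?",
    (reposted_claim_hash,)
)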
@@ -716,6 +732,7 @@ class SQLDB:
affected_channels = r(self.delete_claims, delete_claim_hashes)
r(self.delete_supports, delete_support_txo_hashes)
r(self.insert_claims, insert_claims, header)
r(self.calculate_reposts, insert_claims)
r(update_full_text_search, 'after-insert',
[txo.claim_hash for txo in insert_claims], self.db, height, daemon_height, self.main.first_sync)
r(update_full_text_search, 'before-update',
@@ -32,7 +32,7 @@ class Output(BaseOutput):
__slots__ = (
'channel', 'private_key', 'meta',
'purchase', 'purchased_claim', 'purchase_receipt',
'claims',
'reposted_claim', 'claims',
)
def __init__(self, *args, channel: Optional['Output'] = None,
@@ -43,6 +43,7 @@ class Output(BaseOutput):
self.purchase: 'Output' = None  # txo containing purchase metadata
self.purchased_claim: 'Output' = None  # resolved claim pointed to by purchase
self.purchase_receipt: 'Output' = None  # txo representing purchase receipt for this claim
self.reposted_claim: 'Output' = None  # txo representing claim being reposted
self.claims: List['Output'] = None  # resolved claims for collection
self.meta = {}
@@ -717,44 +717,38 @@ class StreamCommands(ClaimTestCase):
async def test_repost(self):
await self.channel_create('@goodies', '1.0')
tx = await self.stream_create('newstuff', '1.1', channel_name='@goodies')
claim_id = tx['outputs'][0]['claim_id']
await self.stream_repost(claim_id, 'newstuff-again', '1.1')
claim_list = (await self.out(self.daemon.jsonrpc_claim_list()))['items']
reposts_on_claim_list = [claim for claim in claim_list if claim['value_type'] == 'repost']
self.assertEqual(len(reposts_on_claim_list), 1)
claim_id = self.get_claim_id(tx)
self.assertEqual((await self.claim_search(name='newstuff'))[0]['meta']['reposted'], 0)
tx = await self.stream_repost(claim_id, 'newstuff-again', '1.1')
repost_id = self.get_claim_id(tx)
self.assertItemCount(await self.daemon.jsonrpc_claim_list(claim_type='repost'), 1)
self.assertEqual((await self.claim_search(name='newstuff'))[0]['meta']['reposted'], 1)
self.assertEqual((await self.claim_search(reposted_claim_id=claim_id))[0]['claim_id'], repost_id)
await self.channel_create('@reposting-goodies', '1.0')
await self.stream_repost(claim_id, 'repost-on-channel', '1.1', channel_name='@reposting-goodies')
claim_list = (await self.out(self.daemon.jsonrpc_claim_list()))['items']
reposts_on_claim_list = [claim for claim in claim_list if claim['value_type'] == 'repost']
self.assertEqual(len(reposts_on_claim_list), 2)
signed_reposts = [repost for repost in reposts_on_claim_list if repost.get('is_channel_signature_valid')]
self.assertEqual(len(signed_reposts), 1)
# check that it's directly searchable (simplest case, by name)
self.assertItemCount(await self.daemon.jsonrpc_claim_list(claim_type='repost'), 2)
self.assertItemCount(await self.daemon.jsonrpc_claim_search(reposted_claim_id=claim_id), 2)
self.assertEqual((await self.claim_search(name='newstuff'))[0]['meta']['reposted'], 2)
search_results = await self.claim_search(reposted='>=2')
self.assertEqual(len(search_results), 1)
self.assertEqual(search_results[0]['name'], 'newstuff')
search_results = await self.claim_search(name='repost-on-channel')
self.assertEqual(len(search_results), 1)
self.assertTrue(
any(claim['claim_id'] for claim in reposts_on_claim_list
if claim['name'] == 'repost-on-channel' and claim['claim_id'] == search_results[0]['claim_id'])
)
search_results = await self.claim_search(name='newstuff-again')
self.assertEqual(len(search_results), 1)
self.assertTrue(
any(claim['claim_id'] for claim in reposts_on_claim_list
if claim['name'] == 'newstuff-again' and claim['claim_id'] == search_results[0]['claim_id'])
)
# complex case, reverse search (reposts for claim id)
reposts = await self.claim_search(reposted_claim_id=claim_id)
self.assertEqual(len(reposts), 2)
self.assertSetEqual(
{repost['claim_id'] for repost in reposts},
{claim['claim_id'] for claim in reposts_on_claim_list}
)
# check that it resolves fine too
resolved_reposts = await self.resolve(['@reposting-goodies/repost-on-channel', 'newstuff-again'])
self.assertEqual(
[resolution['claim_id'] for resolution in resolved_reposts.values()],
[claim['claim_id'] for claim in reposts_on_claim_list]
)
search = search_results[0]
self.assertEqual(search['name'], 'repost-on-channel')
self.assertEqual(search['signing_channel']['name'], '@reposting-goodies')
self.assertEqual(search['reposted_claim']['name'], 'newstuff')
self.assertEqual(search['reposted_claim']['meta']['reposted'], 2)
self.assertEqual(search['reposted_claim']['signing_channel']['name'], '@goodies')
resolved = await self.resolve(['@reposting-goodies/repost-on-channel', 'newstuff-again'])
self.assertEqual(resolved['@reposting-goodies/repost-on-channel'], search)
self.assertEqual(resolved['newstuff-again']['reposted_claim']['name'], 'newstuff')
async def test_filtering_channels_for_removing_content(self):
await self.channel_create('@badstuff', '1.0')