Merge pull request #3028 from lbryio/limit_claims_per_channel

Added a `--limit_claims_per_channel` argument to `claim_search`, so that at most the specified number of claims per channel are returned.
Lex Berezhny 2020-08-19 15:22:53 -04:00 committed by GitHub
commit da391bcc8d
4 changed files with 36 additions and 6 deletions
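
For context, here is a rough usage sketch (not part of this diff): calling `claim_search` with the new argument against a locally running daemon over its JSON-RPC API. The URL, the default port 5279, and the exact response shape are assumptions about a typical local setup.

```python
# Hedged sketch, not part of this change: query a local lbrynet daemon's
# JSON-RPC API (assumed to be listening on the default http://localhost:5279).
import json
import urllib.request

payload = json.dumps({
    "method": "claim_search",
    "params": {"claim_type": "stream", "limit_claims_per_channel": 2},
}).encode()
request = urllib.request.Request(
    "http://localhost:5279", payload, {"Content-Type": "application/json"})
with urllib.request.urlopen(request) as response:
    result = json.load(response)["result"]
# With the new argument, each channel contributes at most two claims per page.
print([item["name"] for item in result["items"]])
```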

@@ -2310,6 +2310,7 @@ class Daemon(metaclass=JSONRPCServerType):
                     [--channel=<channel> |
                         [[--channel_ids=<channel_ids>...] [--not_channel_ids=<not_channel_ids>...]]]
                     [--has_channel_signature] [--valid_channel_signature | --invalid_channel_signature]
+                    [--limit_claims_per_channel=<limit_claims_per_channel>]
                     [--is_controlling] [--release_time=<release_time>] [--public_key_id=<public_key_id>]
                     [--timestamp=<timestamp>] [--creation_timestamp=<creation_timestamp>]
                     [--height=<height>] [--creation_height=<creation_height>]
@@ -2358,6 +2359,8 @@ class Daemon(metaclass=JSONRPCServerType):
             --invalid_channel_signature   : (bool) claims with invalid channel signature or no signature,
                                                    use in conjunction with --has_channel_signature to
                                                    only get claims with invalid signatures
+            --limit_claims_per_channel=<limit_claims_per_channel>: (int) only return up to the specified
+                                                   number of claims per channel
             --is_controlling              : (bool) winning claims of their respective name
             --public_key_id=<public_key_id> : (str) only return channels having this public key id, this is
                                                     the same key as used in the wallet file to map

@@ -25,12 +25,14 @@ def set_reference(reference, claim_hash, rows):
 class Censor:
-    __slots__ = 'streams', 'channels', 'censored', 'total'
+    __slots__ = 'streams', 'channels', 'limit_claims_per_channel', 'censored', 'claims_in_channel', 'total'
-    def __init__(self, streams: dict = None, channels: dict = None):
+    def __init__(self, streams: dict = None, channels: dict = None, limit_claims_per_channel: int = None):
         self.streams = streams or {}
         self.channels = channels or {}
+        self.limit_claims_per_channel = limit_claims_per_channel  # doesn't count as censored
         self.censored = {}
+        self.claims_in_channel = {}
         self.total = 0
     def censor(self, row) -> bool:
@@ -49,6 +51,11 @@ class Censor:
                 break
         if was_censored:
             self.total += 1
+        if not was_censored and self.limit_claims_per_channel is not None and row['channel_hash']:
+            self.claims_in_channel.setdefault(row['channel_hash'], 0)
+            self.claims_in_channel[row['channel_hash']] += 1
+            if self.claims_in_channel[row['channel_hash']] > self.limit_claims_per_channel:
+                return True
         return was_censored
     def to_message(self, outputs: OutputsMessage, extra_txo_rows):

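The key behavior in the `Censor.censor()` change above: claims from blocked channels are still tallied in `censored`/`total`, while claims that merely exceed the per-channel limit are skipped without being reported as censored. Below is a condensed, self-contained sketch of that distinction; the class and method names are simplified stand-ins, and the real `Censor` operates on SQLite row mappings rather than bare channel hashes.

```python
# Sketch only: mimics the censored-vs-over-limit distinction added to Censor.
class ChannelCapFilter:
    def __init__(self, blocked_channels=None, limit_claims_per_channel=None):
        self.blocked_channels = blocked_channels or set()
        self.limit_claims_per_channel = limit_claims_per_channel
        self.claims_in_channel = {}
        self.total = 0  # counts only claims dropped because their channel is blocked

    def should_drop(self, channel_hash) -> bool:
        if channel_hash in self.blocked_channels:
            self.total += 1
            return True
        if self.limit_claims_per_channel is not None and channel_hash:
            self.claims_in_channel.setdefault(channel_hash, 0)
            self.claims_in_channel[channel_hash] += 1
            if self.claims_in_channel[channel_hash] > self.limit_claims_per_channel:
                return True  # dropped, but not added to `total`
        return False

f = ChannelCapFilter(limit_claims_per_channel=1)
assert [f.should_drop(h) for h in (b'a', b'a', b'b')] == [False, True, False]
assert f.total == 0  # over-limit drops are not reported as censored
```
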
@@ -39,7 +39,7 @@ ATTRIBUTE_ARRAY_MAX_LENGTH = 100
 INTEGER_PARAMS = {
     'height', 'creation_height', 'activation_height', 'expiration_height',
     'timestamp', 'creation_timestamp', 'duration', 'release_time', 'fee_amount',
-    'tx_position', 'channel_join', 'reposted',
+    'tx_position', 'channel_join', 'reposted', 'limit_claims_per_channel',
     'amount', 'effective_amount', 'support_amount',
     'trending_group', 'trending_mixed',
     'trending_local', 'trending_global',
@@ -96,8 +96,8 @@ class ReaderState:
     def get_resolve_censor(self) -> Censor:
         return Censor(self.blocked_streams, self.blocked_channels)
-    def get_search_censor(self) -> Censor:
-        return Censor(self.filtered_streams, self.filtered_channels)
+    def get_search_censor(self, limit_claims_per_channel: int) -> Censor:
+        return Censor(self.filtered_streams, self.filtered_channels, limit_claims_per_channel)
 ctx: ContextVar[Optional[ReaderState]] = ContextVar('ctx')
@@ -421,12 +421,13 @@ def search(constraints) -> Tuple[List, List, int, int, Censor]:
     assert set(constraints).issubset(SEARCH_PARAMS), \
         f"Search query contains invalid arguments: {set(constraints).difference(SEARCH_PARAMS)}"
     total = None
+    limit_claims_per_channel = constraints.pop('limit_claims_per_channel', None)
     if not constraints.pop('no_totals', False):
         total = count_claims(**constraints)
     constraints['offset'] = abs(constraints.get('offset', 0))
     constraints['limit'] = min(abs(constraints.get('limit', 10)), 50)
     context = ctx.get()
-    search_censor = context.get_search_censor()
+    search_censor = context.get_search_censor(limit_claims_per_channel)
     txo_rows = search_claims(search_censor, **constraints)
     extra_txo_rows = _get_referenced_rows(txo_rows, search_censor.censored.keys())
     return txo_rows, extra_txo_rows, constraints['offset'], total, search_censor

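Note the ordering in `search()` above: `limit_claims_per_channel` is popped from the constraints before `count_claims()` runs, so the reported total still counts every matching claim and only the returned rows are capped per channel. A minimal, self-contained sketch of that effect follows; it is an illustration under simplified assumptions, not the actual reader module.

```python
# Sketch only: totals are computed before the per-channel cap is applied.
from collections import Counter

def paginate_with_channel_cap(rows, limit_claims_per_channel=None, limit=10):
    """rows: dicts with a 'channel_hash' key (None for claims without a channel)."""
    total = len(rows)                      # counted before the cap is applied
    seen = Counter()
    page = []
    for row in rows:
        channel = row.get('channel_hash')
        if limit_claims_per_channel is not None and channel:
            seen[channel] += 1
            if seen[channel] > limit_claims_per_channel:
                continue                   # over the cap: skipped, but still in `total`
        page.append(row)
        if len(page) >= limit:
            break
    return page, total

rows = [{'channel_hash': h} for h in (b'a', b'a', b'a', b'b', None)]
page, total = paginate_with_channel_cap(rows, limit_claims_per_channel=2)
assert total == 5 and len(page) == 4      # the third b'a' claim is dropped
```
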
@@ -319,6 +319,25 @@ class ClaimSearchCommand(ClaimTestCase):
                     not_channel_ids=[chan2_id], has_channel_signature=True, valid_channel_signature=True)
         await match([], not_channel_ids=[chan1_id, chan2_id], has_channel_signature=True, valid_channel_signature=True)
+    async def test_limit_claims_per_channel(self):
+        match = self.assertFindsClaims
+        chan1_id = self.get_claim_id(await self.channel_create('@chan1'))
+        chan2_id = self.get_claim_id(await self.channel_create('@chan2'))
+        claim1 = await self.stream_create('claim1')
+        claim2 = await self.stream_create('claim2', channel_id=chan1_id)
+        claim3 = await self.stream_create('claim3', channel_id=chan1_id)
+        claim4 = await self.stream_create('claim4', channel_id=chan1_id)
+        claim5 = await self.stream_create('claim5', channel_id=chan2_id)
+        claim6 = await self.stream_create('claim6', channel_id=chan2_id)
+        await match(
+            [claim6, claim5, claim4, claim3, claim1],
+            limit_claims_per_channel=2, claim_type='stream'
+        )
+        await match(
+            [claim6, claim5, claim4, claim3, claim2, claim1],
+            limit_claims_per_channel=3, claim_type='stream'
+        )
     async def test_claim_type_and_media_type_search(self):
         # create an invalid/unknown claim
         address = await self.account.receiving.get_or_create_usable_address()