forked from LBRYCommunity/lbry-sdk
FILTERING_CHANNELS_IDS on claim search
This commit is contained in:
parent
56af4c2fcb
commit
04a823c7d0
4 changed files with 37 additions and 9 deletions
|
@@ -48,7 +48,7 @@ INTEGER_PARAMS = {
|
||||||
SEARCH_PARAMS = {
|
SEARCH_PARAMS = {
|
||||||
'name', 'text', 'claim_id', 'claim_ids', 'txid', 'nout', 'channel', 'channel_ids', 'not_channel_ids',
|
'name', 'text', 'claim_id', 'claim_ids', 'txid', 'nout', 'channel', 'channel_ids', 'not_channel_ids',
|
||||||
'public_key_id', 'claim_type', 'stream_types', 'media_types', 'fee_currency',
|
'public_key_id', 'claim_type', 'stream_types', 'media_types', 'fee_currency',
|
||||||
'has_channel_signature', 'signature_valid',
|
'has_channel_signature', 'signature_valid', 'blocklist_channel_ids',
|
||||||
'any_tags', 'all_tags', 'not_tags', 'reposted_claim_id',
|
'any_tags', 'all_tags', 'not_tags', 'reposted_claim_id',
|
||||||
'any_locations', 'all_locations', 'not_locations',
|
'any_locations', 'all_locations', 'not_locations',
|
||||||
'any_languages', 'all_languages', 'not_languages',
|
'any_languages', 'all_languages', 'not_languages',
|
||||||
|
@@ -260,6 +260,16 @@ def _get_claims(cols, for_count=False, **constraints) -> Tuple[str, Dict]:
|
||||||
'claim.signature_valid__is_null': True,
|
'claim.signature_valid__is_null': True,
|
||||||
'claim.channel_hash__not_in': not_channel_ids_binary
|
'claim.channel_hash__not_in': not_channel_ids_binary
|
||||||
}
|
}
|
||||||
|
if 'blocklist_channel_ids' in constraints:
|
||||||
|
blocklist_ids = constraints.pop('blocklist_channel_ids')
|
||||||
|
if blocklist_ids:
|
||||||
|
not_repost_from_channel_ids = [
|
||||||
|
sqlite3.Binary(unhexlify(channel_id)[::-1]) for channel_id in blocklist_ids
|
||||||
|
]
|
||||||
|
constraints['null_or_not_reposted_by__or'] = {
|
||||||
|
'repost.channel_hash__not_in': not_repost_from_channel_ids,
|
||||||
|
'repost.channel_hash__is_null': True
|
||||||
|
}
|
||||||
if 'signature_valid' in constraints:
|
if 'signature_valid' in constraints:
|
||||||
has_channel_signature = constraints.pop('has_channel_signature', False)
|
has_channel_signature = constraints.pop('has_channel_signature', False)
|
||||||
if has_channel_signature:
|
if has_channel_signature:
|
||||||
|
@@ -304,7 +314,7 @@ def _get_claims(cols, for_count=False, **constraints) -> Tuple[str, Dict]:
|
||||||
constraints["order_by"] = FTS_ORDER_BY
|
constraints["order_by"] = FTS_ORDER_BY
|
||||||
select = f"SELECT {cols} FROM search JOIN claim ON (search.rowid=claim.rowid)"
|
select = f"SELECT {cols} FROM search JOIN claim ON (search.rowid=claim.rowid)"
|
||||||
else:
|
else:
|
||||||
select = f"SELECT {cols} FROM claim"
|
select = f"SELECT {cols} FROM claim LEFT JOIN claim as repost ON (claim.claim_hash=repost.reposted_claim_hash)"
|
||||||
|
|
||||||
sql, values = query(
|
sql, values = query(
|
||||||
select if for_count else select+"""
|
select if for_count else select+"""
|
||||||
|
|
|
@@ -107,11 +107,8 @@ class LBRYElectrumX(ElectrumX):
|
||||||
self.bp: LBRYBlockProcessor = self.session_mgr.bp
|
self.bp: LBRYBlockProcessor = self.session_mgr.bp
|
||||||
self.db: LBRYDB = self.bp.db
|
self.db: LBRYDB = self.bp.db
|
||||||
# space separated list of channel URIs used for filtering bad content
|
# space separated list of channel URIs used for filtering bad content
|
||||||
filtering_channels = self.env.default('FILTERING_CHANNELS', '')
|
filtering_channels = self.env.default('FILTERING_CHANNELS_IDS', '1111')
|
||||||
if ' ' in filtering_channels:
|
self.filtering_channels_ids = list(filter(None, filtering_channels.split(' ')))
|
||||||
self.filtering_channels_uris = filtering_channels.split(' ')
|
|
||||||
else:
|
|
||||||
self.filtering_channels_uris = []
|
|
||||||
|
|
||||||
def set_request_handlers(self, ptuple):
|
def set_request_handlers(self, ptuple):
|
||||||
super().set_request_handlers(ptuple)
|
super().set_request_handlers(ptuple)
|
||||||
|
@@ -186,6 +183,7 @@ class LBRYElectrumX(ElectrumX):
|
||||||
|
|
||||||
async def claimtrie_search(self, **kwargs):
|
async def claimtrie_search(self, **kwargs):
|
||||||
if kwargs:
|
if kwargs:
|
||||||
|
kwargs['blocklist_channel_ids'] = self.filtering_channels_ids
|
||||||
return await self.run_and_cache_query('search', reader.search_to_bytes, kwargs)
|
return await self.run_and_cache_query('search', reader.search_to_bytes, kwargs)
|
||||||
|
|
||||||
async def claimtrie_resolve(self, *urls):
|
async def claimtrie_resolve(self, *urls):
|
||||||
|
|
|
@@ -750,13 +750,31 @@ class StreamCommands(ClaimTestCase):
|
||||||
{claim['claim_id'] for claim in reposts_on_claim_list}
|
{claim['claim_id'] for claim in reposts_on_claim_list}
|
||||||
)
|
)
|
||||||
# check that it resolves fine too
|
# check that it resolves fine too
|
||||||
# todo: should resolve show the repost information?
|
|
||||||
resolved_reposts = await self.resolve(['@reposting-goodies/repost-on-channel', 'newstuff-again'])
|
resolved_reposts = await self.resolve(['@reposting-goodies/repost-on-channel', 'newstuff-again'])
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
[resolution['claim_id'] for resolution in resolved_reposts.values()],
|
[resolution['claim_id'] for resolution in resolved_reposts.values()],
|
||||||
[claim['claim_id'] for claim in reposts_on_claim_list]
|
[claim['claim_id'] for claim in reposts_on_claim_list]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
async def test_filtering_channels_for_removing_content(self):
|
||||||
|
await self.out(self.channel_create('@badstuff', '1.0'))
|
||||||
|
await self.out(self.stream_create('not_bad', '1.1', channel_name='@badstuff'))
|
||||||
|
tx = await self.out(self.stream_create('too_bad', '1.1', channel_name='@badstuff'))
|
||||||
|
claim_id = tx['outputs'][0]['claim_id']
|
||||||
|
filtering1 = await self.out(self.channel_create('@filtering1', '1.0'))
|
||||||
|
filtering1 = filtering1['outputs'][0]['claim_id']
|
||||||
|
await self.stream_repost(claim_id, 'filter1', '1.1', channel_name='@filtering1')
|
||||||
|
await self.conductor.spv_node.stop()
|
||||||
|
await self.conductor.spv_node.start(
|
||||||
|
self.conductor.blockchain_node, extraconf={'FILTERING_CHANNELS_IDS': filtering1}
|
||||||
|
)
|
||||||
|
await self.ledger.stop()
|
||||||
|
await self.ledger.start()
|
||||||
|
filtered_claim_search = await self.claim_search(name='too_bad')
|
||||||
|
self.assertEqual(filtered_claim_search, [])
|
||||||
|
filtered_claim_search = await self.claim_search(name='not_bad')
|
||||||
|
self.assertEqual(len(filtered_claim_search), 1)
|
||||||
|
|
||||||
async def test_publish_updates_file_list(self):
|
async def test_publish_updates_file_list(self):
|
||||||
tx = await self.out(self.stream_create(title='created'))
|
tx = await self.out(self.stream_create(title='created'))
|
||||||
txo = tx['outputs'][0]
|
txo = tx['outputs'][0]
|
||||||
|
|
|
@@ -205,7 +205,7 @@ class SPVNode:
|
||||||
self.session_timeout = 600
|
self.session_timeout = 600
|
||||||
self.rpc_port = '0' # disabled by default
|
self.rpc_port = '0' # disabled by default
|
||||||
|
|
||||||
async def start(self, blockchain_node: 'BlockchainNode'):
|
async def start(self, blockchain_node: 'BlockchainNode', extraconf=None):
|
||||||
self.data_path = tempfile.mkdtemp()
|
self.data_path = tempfile.mkdtemp()
|
||||||
conf = {
|
conf = {
|
||||||
'DB_DIRECTORY': self.data_path,
|
'DB_DIRECTORY': self.data_path,
|
||||||
|
@@ -218,6 +218,8 @@ class SPVNode:
|
||||||
'INDIVIDUAL_TAG_INDEXES': '',
|
'INDIVIDUAL_TAG_INDEXES': '',
|
||||||
'RPC_PORT': self.rpc_port
|
'RPC_PORT': self.rpc_port
|
||||||
}
|
}
|
||||||
|
if extraconf:
|
||||||
|
conf.update(extraconf)
|
||||||
# TODO: don't use os.environ
|
# TODO: don't use os.environ
|
||||||
os.environ.update(conf)
|
os.environ.update(conf)
|
||||||
self.server = Server(Env(self.coin_class))
|
self.server = Server(Env(self.coin_class))
|
||||||
|
|
Loading…
Reference in a new issue