censored searches/resolves include metadata of channel which did the censoring

commit 9607d21828 (parent 6fbbf36143)
7 changed files with 81 additions and 60 deletions
@@ -13,6 +13,16 @@ NOT_FOUND = ErrorMessage.Code.Name(ErrorMessage.NOT_FOUND)
 BLOCKED = ErrorMessage.Code.Name(ErrorMessage.BLOCKED)
 
 
+def set_reference(reference, claim_hash, rows):
+    if claim_hash:
+        for txo in rows:
+            if claim_hash == txo['claim_hash']:
+                reference.tx_hash = txo['txo_hash'][:32]
+                reference.nout = struct.unpack('<I', txo['txo_hash'][32:])[0]
+                reference.height = txo['height']
+                return
+
+
 class Censor:
 
     __slots__ = 'streams', 'channels', 'censored', 'total'
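The new module-level set_reference() helper replaces the old Outputs.set_reference static method: given a claim hash and the extra_txo_rows returned alongside search/resolve results, it fills a protobuf Output reference (tx_hash, nout, height) for that claim. A minimal sketch of how it behaves, using a stand-in object instead of the real protobuf message (the underscore-prefixed name is illustrative only):

import struct

class _Ref:
    # stand-in for the protobuf Output sub-message that set_reference() fills in
    tx_hash = b''
    nout = 0
    height = 0

def set_reference(reference, claim_hash, rows):
    # same lookup logic as the helper added in this commit
    if claim_hash:
        for txo in rows:
            if claim_hash == txo['claim_hash']:
                reference.tx_hash = txo['txo_hash'][:32]
                reference.nout = struct.unpack('<I', txo['txo_hash'][32:])[0]
                reference.height = txo['height']
                return

rows = [{'claim_hash': b'\x01' * 20, 'txo_hash': b'\xaa' * 32 + struct.pack('<I', 3), 'height': 500}]
ref = _Ref()
set_reference(ref, b'\x01' * 20, rows)
assert (ref.nout, ref.height) == (3, 500)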
@@ -39,12 +49,12 @@ class Censor:
         self.total += 1
         return was_censored
 
-    def to_message(self, outputs: OutputsMessage):
+    def to_message(self, outputs: OutputsMessage, extra_txo_rows):
         outputs.blocked_total = self.total
         for censoring_channel_hash, count in self.censored.items():
-            block = outputs.blocked.add()
-            block.count = count
-            block.channel_hash = censoring_channel_hash
+            blocked = outputs.blocked.add()
+            blocked.count = count
+            set_reference(blocked.channel, censoring_channel_hash, extra_txo_rows)
 
 
 class Outputs:
@@ -66,26 +76,35 @@ class Outputs:
         for txo_message in self.extra_txos:
             self.message_to_txo(txo_message, tx_map)
         txos = [self.message_to_txo(txo_message, tx_map) for txo_message in self.txos]
-        return txos, self.inflate_blocked()
+        return txos, self.inflate_blocked(tx_map)
 
-    def inflate_blocked(self):
+    def inflate_blocked(self, tx_map):
         return {
             "total": self.blocked_total,
-            "channels": {
-                hexlify(message.channel_hash[::-1]).decode(): message.count
-                for message in self.blocked
-            }
+            "channels": [{
+                'channel': self.message_to_txo(blocked.channel, tx_map),
+                'blocked': blocked.count
+            } for blocked in self.blocked]
         }
 
     def message_to_txo(self, txo_message, tx_map):
         if txo_message.WhichOneof('meta') == 'error':
-            return {
+            error = {
                 'error': {
-                    'name': txo_message.error.Code.Name(txo_message.error.code).lower(),
+                    'name': txo_message.error.Code.Name(txo_message.error.code),
                     'text': txo_message.error.text,
                 }
             }
-        txo = tx_map[txo_message.tx_hash].outputs[txo_message.nout]
+            if error['error']['name'] == BLOCKED:
+                error['error']['censor'] = self.message_to_txo(
+                    txo_message.error.blocked.channel, tx_map
+                )
+            return error
+
+        tx = tx_map.get(txo_message.tx_hash)
+        if not tx:
+            return
+        txo = tx.outputs[txo_message.nout]
         if txo_message.WhichOneof('meta') == 'claim':
             claim = txo_message.claim
             txo.meta = {
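With this change the JSON-inflated "blocked" summary no longer maps hex channel IDs to counts; each entry carries the fully resolved claim of the censoring channel. Roughly the shape a claim_search response takes after this commit (values are illustrative and taken from the integration tests further down; the resolved channel dict is truncated):

blocked_example = {
    "total": 1,
    "channels": [
        {
            # resolved txo of the channel that did the censoring (truncated here)
            "channel": {"short_url": "lbry://@filtering#1"},
            "blocked": 1,
        }
    ],
}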
@@ -145,7 +164,7 @@ class Outputs:
         if total is not None:
             page.total = total
         if blocked is not None:
-            blocked.to_message(page)
+            blocked.to_message(page, extra_txo_rows)
         for row in txo_rows:
             cls.row_to_message(row, page.txos.add(), extra_txo_rows)
         for row in extra_txo_rows:
@@ -162,6 +181,7 @@ class Outputs:
                 txo_message.error.code = ErrorMessage.NOT_FOUND
             elif isinstance(txo, ResolveCensoredError):
                 txo_message.error.code = ErrorMessage.BLOCKED
+                set_reference(txo_message.error.blocked.channel, txo.censor_hash, extra_txo_rows)
             return
         txo_message.tx_hash = txo['txo_hash'][:32]
         txo_message.nout, = struct.unpack('<I', txo['txo_hash'][32:])
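On the client side, a censored resolve now surfaces both the BLOCKED error and, under a 'censor' key, the resolved claim of the channel that blocked it, rather than only an error string. Approximately what resolve returns for a blocked URL after this change (shape inferred from the updated tests below; the text is truncated and the censor dict is abbreviated):

censored_resolve_example = {
    "error": {
        "name": "BLOCKED",
        "text": "Resolve of 'lbry://@some_channel/bad_content' was censored ...",
        # resolved txo of the blocking channel (truncated here)
        "censor": {"short_url": "lbry://@blocking#1"},
    }
}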
@@ -184,20 +204,5 @@ class Outputs:
         txo_message.claim.trending_mixed = txo['trending_mixed']
         txo_message.claim.trending_local = txo['trending_local']
         txo_message.claim.trending_global = txo['trending_global']
-        cls.set_reference(txo_message, 'channel', txo['channel_hash'], extra_txo_rows)
-        cls.set_reference(txo_message, 'repost', txo['reposted_claim_hash'], extra_txo_rows)
-
-    @staticmethod
-    def set_blocked(message, blocked):
-        message.blocked_total = blocked.total
-
-    @staticmethod
-    def set_reference(message, attr, claim_hash, rows):
-        if claim_hash:
-            for txo in rows:
-                if claim_hash == txo['claim_hash']:
-                    reference = getattr(message.claim, attr)
-                    reference.tx_hash = txo['txo_hash'][:32]
-                    reference.nout = struct.unpack('<I', txo['txo_hash'][32:])[0]
-                    reference.height = txo['height']
-                    break
+        set_reference(txo_message.claim.channel, txo['channel_hash'], extra_txo_rows)
+        set_reference(txo_message.claim.repost, txo['reposted_claim_hash'], extra_txo_rows)
@@ -19,7 +19,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   name='result.proto',
   package='pb',
   syntax='proto3',
-  serialized_pb=_b('\n\x0cresult.proto\x12\x02pb\"\x97\x01\n\x07Outputs\x12\x18\n\x04txos\x18\x01 \x03(\x0b\x32\n.pb.Output\x12\x1e\n\nextra_txos\x18\x02 \x03(\x0b\x32\n.pb.Output\x12\r\n\x05total\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\x12\x1c\n\x07\x62locked\x18\x05 \x03(\x0b\x32\x0b.pb.Blocked\x12\x15\n\rblocked_total\x18\x06 \x01(\r\"{\n\x06Output\x12\x0f\n\x07tx_hash\x18\x01 \x01(\x0c\x12\x0c\n\x04nout\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x1e\n\x05\x63laim\x18\x07 \x01(\x0b\x32\r.pb.ClaimMetaH\x00\x12\x1a\n\x05\x65rror\x18\x0f \x01(\x0b\x32\t.pb.ErrorH\x00\x42\x06\n\x04meta\"\xaf\x03\n\tClaimMeta\x12\x1b\n\x07\x63hannel\x18\x01 \x01(\x0b\x32\n.pb.Output\x12\x1a\n\x06repost\x18\x02 \x01(\x0b\x32\n.pb.Output\x12\x11\n\tshort_url\x18\x03 \x01(\t\x12\x15\n\rcanonical_url\x18\x04 \x01(\t\x12\x16\n\x0eis_controlling\x18\x05 \x01(\x08\x12\x18\n\x10take_over_height\x18\x06 \x01(\r\x12\x17\n\x0f\x63reation_height\x18\x07 \x01(\r\x12\x19\n\x11\x61\x63tivation_height\x18\x08 \x01(\r\x12\x19\n\x11\x65xpiration_height\x18\t \x01(\r\x12\x19\n\x11\x63laims_in_channel\x18\n \x01(\r\x12\x10\n\x08reposted\x18\x0b \x01(\r\x12\x18\n\x10\x65\x66\x66\x65\x63tive_amount\x18\x14 \x01(\x04\x12\x16\n\x0esupport_amount\x18\x15 \x01(\x04\x12\x16\n\x0etrending_group\x18\x16 \x01(\r\x12\x16\n\x0etrending_mixed\x18\x17 \x01(\x02\x12\x16\n\x0etrending_local\x18\x18 \x01(\x02\x12\x17\n\x0ftrending_global\x18\x19 \x01(\x02\"\x94\x01\n\x05\x45rror\x12\x1c\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x0e.pb.Error.Code\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x1c\n\x07\x62locked\x18\x03 \x01(\x0b\x32\x0b.pb.Blocked\"A\n\x04\x43ode\x12\x10\n\x0cUNKNOWN_CODE\x10\x00\x12\r\n\tNOT_FOUND\x10\x01\x12\x0b\n\x07INVALID\x10\x02\x12\x0b\n\x07\x42LOCKED\x10\x03\".\n\x07\x42locked\x12\r\n\x05\x63ount\x18\x01 \x01(\r\x12\x14\n\x0c\x63hannel_hash\x18\x02 \x01(\x0c\x62\x06proto3')
+  serialized_pb=_b('\n\x0cresult.proto\x12\x02pb\"\x97\x01\n\x07Outputs\x12\x18\n\x04txos\x18\x01 \x03(\x0b\x32\n.pb.Output\x12\x1e\n\nextra_txos\x18\x02 \x03(\x0b\x32\n.pb.Output\x12\r\n\x05total\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\x12\x1c\n\x07\x62locked\x18\x05 \x03(\x0b\x32\x0b.pb.Blocked\x12\x15\n\rblocked_total\x18\x06 \x01(\r\"{\n\x06Output\x12\x0f\n\x07tx_hash\x18\x01 \x01(\x0c\x12\x0c\n\x04nout\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x1e\n\x05\x63laim\x18\x07 \x01(\x0b\x32\r.pb.ClaimMetaH\x00\x12\x1a\n\x05\x65rror\x18\x0f \x01(\x0b\x32\t.pb.ErrorH\x00\x42\x06\n\x04meta\"\xaf\x03\n\tClaimMeta\x12\x1b\n\x07\x63hannel\x18\x01 \x01(\x0b\x32\n.pb.Output\x12\x1a\n\x06repost\x18\x02 \x01(\x0b\x32\n.pb.Output\x12\x11\n\tshort_url\x18\x03 \x01(\t\x12\x15\n\rcanonical_url\x18\x04 \x01(\t\x12\x16\n\x0eis_controlling\x18\x05 \x01(\x08\x12\x18\n\x10take_over_height\x18\x06 \x01(\r\x12\x17\n\x0f\x63reation_height\x18\x07 \x01(\r\x12\x19\n\x11\x61\x63tivation_height\x18\x08 \x01(\r\x12\x19\n\x11\x65xpiration_height\x18\t \x01(\r\x12\x19\n\x11\x63laims_in_channel\x18\n \x01(\r\x12\x10\n\x08reposted\x18\x0b \x01(\r\x12\x18\n\x10\x65\x66\x66\x65\x63tive_amount\x18\x14 \x01(\x04\x12\x16\n\x0esupport_amount\x18\x15 \x01(\x04\x12\x16\n\x0etrending_group\x18\x16 \x01(\r\x12\x16\n\x0etrending_mixed\x18\x17 \x01(\x02\x12\x16\n\x0etrending_local\x18\x18 \x01(\x02\x12\x17\n\x0ftrending_global\x18\x19 \x01(\x02\"\x94\x01\n\x05\x45rror\x12\x1c\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x0e.pb.Error.Code\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x1c\n\x07\x62locked\x18\x03 \x01(\x0b\x32\x0b.pb.Blocked\"A\n\x04\x43ode\x12\x10\n\x0cUNKNOWN_CODE\x10\x00\x12\r\n\tNOT_FOUND\x10\x01\x12\x0b\n\x07INVALID\x10\x02\x12\x0b\n\x07\x42LOCKED\x10\x03\"5\n\x07\x42locked\x12\r\n\x05\x63ount\x18\x01 \x01(\r\x12\x1b\n\x07\x63hannel\x18\x02 \x01(\x0b\x32\n.pb.Outputb\x06proto3')
 )
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 

@@ -388,9 +388,9 @@ _BLOCKED = _descriptor.Descriptor(
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
-      name='channel_hash', full_name='pb.Blocked.channel_hash', index=1,
-      number=2, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
+      name='channel', full_name='pb.Blocked.channel', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),

@@ -407,7 +407,7 @@ _BLOCKED = _descriptor.Descriptor(
   oneofs=[
   ],
   serialized_start=884,
-  serialized_end=930,
+  serialized_end=937,
 )
 
 _OUTPUTS.fields_by_name['txos'].message_type = _OUTPUT

@@ -426,6 +426,7 @@ _CLAIMMETA.fields_by_name['repost'].message_type = _OUTPUT
 _ERROR.fields_by_name['code'].enum_type = _ERROR_CODE
 _ERROR.fields_by_name['blocked'].message_type = _BLOCKED
 _ERROR_CODE.containing_type = _ERROR
+_BLOCKED.fields_by_name['channel'].message_type = _OUTPUT
 DESCRIPTOR.message_types_by_name['Outputs'] = _OUTPUTS
 DESCRIPTOR.message_types_by_name['Output'] = _OUTPUT
 DESCRIPTOR.message_types_by_name['ClaimMeta'] = _CLAIMMETA
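The regenerated descriptor reflects a schema change in result.proto: Blocked.channel_hash (bytes, field 2) becomes Blocked.channel, an embedded pb.Output message, so the censoring channel travels as a full resolvable reference. A minimal sketch of working with the regenerated message (the import path is assumed from how result.py uses OutputsMessage; field values are illustrative):

from lbry.schema.types.v2.result_pb2 import Outputs  # assumed module path

msg = Outputs()
entry = msg.blocked.add()
entry.count = 1
# field 2 is now an Output sub-message instead of raw bytes channel_hash
entry.channel.tx_hash = b'\x00' * 32
entry.channel.nout = 0
entry.channel.height = 100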
@@ -4,10 +4,11 @@ import apsw
 import logging
 from operator import itemgetter
 from typing import Tuple, List, Dict, Union, Type, Optional
-from binascii import unhexlify, hexlify
+from binascii import unhexlify
 from decimal import Decimal
 from contextvars import ContextVar
 from functools import wraps
+from itertools import chain
 from dataclasses import dataclass
 
 from lbry.wallet.database import query, interpolate
@@ -388,9 +389,13 @@ def search_claims(censor: Censor, **constraints) -> List:
     )
 
 
-def _get_referenced_rows(censor: Censor, txo_rows: List[dict]):
+def _get_referenced_rows(txo_rows: List[dict], censor_channels: List[bytes]):
+    censor = ctx.get().get_resolve_censor()
     repost_hashes = set(filter(None, map(itemgetter('reposted_claim_hash'), txo_rows)))
-    channel_hashes = set(filter(None, map(itemgetter('channel_hash'), txo_rows)))
+    channel_hashes = set(chain(
+        filter(None, map(itemgetter('channel_hash'), txo_rows)),
+        censor_channels
+    ))
 
     reposted_txos = []
     if repost_hashes:
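_get_referenced_rows now receives the hashes of the censoring channels explicitly and folds them into the set of channels to fetch, so the censoring channel's claim ends up in extra_txo_rows and can be attached to blocked results. A small illustration of the gathering step with made-up hashes:

from itertools import chain
from operator import itemgetter

txo_rows = [{'channel_hash': b'\x01' * 20, 'reposted_claim_hash': None}]
censor_channels = [b'\x02' * 20]  # channels that censored something in this result set
channel_hashes = set(chain(
    filter(None, map(itemgetter('channel_hash'), txo_rows)),
    censor_channels
))
assert channel_hashes == {b'\x01' * 20, b'\x02' * 20}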
@@ -418,7 +423,7 @@ def search(constraints) -> Tuple[List, List, int, int, Censor]:
     context = ctx.get()
     search_censor = context.get_search_censor()
     txo_rows = search_claims(search_censor, **constraints)
-    extra_txo_rows = _get_referenced_rows(context.get_resolve_censor(), txo_rows)
+    extra_txo_rows = _get_referenced_rows(txo_rows, search_censor.censored.keys())
     return txo_rows, extra_txo_rows, constraints['offset'], total, search_censor
 
 

@@ -426,7 +431,8 @@ def search(constraints) -> Tuple[List, List, int, int, Censor]:
 def resolve(urls) -> Tuple[List, List]:
     txo_rows = [resolve_url(raw_url) for raw_url in urls]
     extra_txo_rows = _get_referenced_rows(
-        ctx.get().get_resolve_censor(), [r for r in txo_rows if isinstance(r, dict)]
+        [txo for txo in txo_rows if isinstance(txo, dict)],
+        [txo.censor_hash for txo in txo_rows if isinstance(txo, ResolveCensoredError)]
     )
     return txo_rows, extra_txo_rows
 

@@ -452,7 +458,7 @@ def resolve_url(raw_url):
         if matches:
             channel = matches[0]
         elif censor.censored:
-            return ResolveCensoredError(raw_url, hexlify(next(iter(censor.censored))[::-1]).decode())
+            return ResolveCensoredError(raw_url, next(iter(censor.censored)))
         else:
             return LookupError(f'Could not find channel in "{raw_url}".')
 

@@ -472,7 +478,7 @@ def resolve_url(raw_url):
         if matches:
             return matches[0]
         elif censor.censored:
-            return ResolveCensoredError(raw_url, hexlify(next(iter(censor.censored))[::-1]).decode())
+            return ResolveCensoredError(raw_url, next(iter(censor.censored)))
         else:
             return LookupError(f'Could not find claim at "{raw_url}".')
 
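resolve_url now passes the censoring channel's raw claim hash (bytes) into ResolveCensoredError rather than a hexlified string, which is what lets row_to_message() above look the channel up in extra_txo_rows via set_reference(). A sketch of the error object as the rest of the code relies on it (the lbry.error import path is an assumption; only the constructor arguments and the censor_hash attribute are taken from this diff):

from lbry.error import ResolveCensoredError  # assumed import path

err = ResolveCensoredError('lbry://@some_channel/bad_content', b'\x02' * 20)
assert err.censor_hash == b'\x02' * 20  # raw hash, ready for the extra_txo_rows lookup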
@@ -241,10 +241,10 @@ class SQLDB:
         streams, channels = {}, {}
         if channel_hashes:
             sql = query(
-                "SELECT claim.channel_hash, claim.reposted_claim_hash, reposted.claim_type "
-                "FROM claim JOIN claim AS reposted ON (reposted.claim_hash=claim.reposted_claim_hash)", **{
-                    'claim.reposted_claim_hash__is_not_null': 1,
-                    'claim.channel_hash__in': channel_hashes
+                "SELECT repost.channel_hash, repost.reposted_claim_hash, target.claim_type "
+                "FROM claim as repost JOIN claim AS target ON (target.claim_hash=repost.reposted_claim_hash)", **{
+                    'repost.reposted_claim_hash__is_not_null': 1,
+                    'repost.channel_hash__in': channel_hashes
                 }
             )
             for blocked_claim in self.execute(*sql):
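Renaming the self-join aliases to repost/target makes it explicit that the constraints filter the reposting claim while claim_type is read from the reposted target. Roughly the SQL the query() call expands to (an approximation; lbry.wallet.database.query does the actual constraint rendering and parameter binding, and the IN list is elided):

approx_sql = (
    "SELECT repost.channel_hash, repost.reposted_claim_hash, target.claim_type "
    "FROM claim as repost JOIN claim AS target ON (target.claim_hash=repost.reposted_claim_hash) "
    "WHERE repost.reposted_claim_hash IS NOT NULL AND repost.channel_hash IN (...)"
)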
@@ -405,10 +405,10 @@ class ClaimCommands(ClaimTestCase):
         await self.ledger.wait(channel_tx)
 
         r = await self.claim_list(resolve=True)
-        self.assertEqual('not_found', r[0]['meta']['error']['name'])
+        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
         self.assertTrue(r[1]['meta']['is_controlling'])
         r = await self.channel_list(resolve=True)
-        self.assertEqual('not_found', r[0]['meta']['error']['name'])
+        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
         self.assertTrue(r[1]['meta']['is_controlling'])
 
         # confirm it

@@ -430,10 +430,10 @@ class ClaimCommands(ClaimTestCase):
         await self.ledger.wait(stream_tx)
 
         r = await self.claim_list(resolve=True)
-        self.assertEqual('not_found', r[0]['meta']['error']['name'])
+        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
         self.assertTrue(r[1]['meta']['is_controlling'])
         r = await self.stream_list(resolve=True)
-        self.assertEqual('not_found', r[0]['meta']['error']['name'])
+        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
         self.assertTrue(r[1]['meta']['is_controlling'])
 
         # confirm it

@@ -845,18 +845,26 @@ class StreamCommands(ClaimTestCase):
 
         # search for blocked content directly
         result = await self.out(self.daemon.jsonrpc_claim_search(name='bad_content'))
+        blocked = result['blocked']
         self.assertEqual([], result['items'])
-        self.assertEqual({"channels": {filtering_channel_id: 1}, "total": 1}, result['blocked'])
+        self.assertEqual(1, blocked['total'])
+        self.assertEqual(1, len(blocked['channels']))
+        self.assertEqual(1, blocked['channels'][0]['blocked'])
+        self.assertTrue(blocked['channels'][0]['channel']['short_url'].startswith('lbry://@filtering#'))
 
         # search channel containing blocked content
         result = await self.out(self.daemon.jsonrpc_claim_search(channel='@some_channel'))
+        blocked = result['blocked']
         self.assertEqual(1, len(result['items']))
-        self.assertEqual({"channels": {filtering_channel_id: 1}, "total": 1}, result['blocked'])
+        self.assertEqual(1, blocked['total'])
+        self.assertEqual(1, len(blocked['channels']))
+        self.assertEqual(1, blocked['channels'][0]['blocked'])
+        self.assertTrue(blocked['channels'][0]['channel']['short_url'].startswith('lbry://@filtering#'))
 
         # content was filtered by not_tag before censoring
         result = await self.out(self.daemon.jsonrpc_claim_search(channel='@some_channel', not_tags=["good", "bad"]))
         self.assertEqual(0, len(result['items']))
-        self.assertEqual({"channels": {}, "total": 0}, result['blocked'])
+        self.assertEqual({"channels": [], "total": 0}, result['blocked'])
 
         blocking_channel_id = self.get_claim_id(
             await self.channel_create('@blocking', '1.0')

@@ -874,8 +882,9 @@ class StreamCommands(ClaimTestCase):
         # blocked content is not resolveable
         result = await self.out(self.daemon.jsonrpc_resolve('lbry://@some_channel/bad_content'))
         error = result['lbry://@some_channel/bad_content']['error']
-        self.assertTrue(error['name'], 'blocked')
+        self.assertEqual(error['name'], 'BLOCKED')
         self.assertTrue(error['text'].startswith("Resolve of 'lbry://@some_channel/bad_content' was censored"))
+        self.assertTrue(error['censor']['short_url'].startswith('lbry://@blocking#'))
 
     async def test_publish_updates_file_list(self):
         tx = await self.stream_create(title='created')
@@ -15,7 +15,7 @@ class BaseResolveTestCase(CommandTestCase):
         other = (await self.resolve(name))[name]
         if claim_id is None:
             self.assertIn('error', other)
-            self.assertEqual(other['error']['name'], 'not_found')
+            self.assertEqual(other['error']['name'], 'NOT_FOUND')
         else:
             self.assertEqual(claim_id, other['claim_id'])
 

@@ -186,7 +186,7 @@ class ResolveCommand(BaseResolveTestCase):
         self.assertEqual(response, {
             'lbry://@abc/on-channel-claim': {
                 'error': {
-                    'name': 'not_found',
+                    'name': 'NOT_FOUND',
                     'text': 'Could not find claim at "lbry://@abc/on-channel-claim".',
                 }
             }

@@ -265,7 +265,7 @@ class ResolveCommand(BaseResolveTestCase):
         self.assertEqual(response, {
             '@olds/bad_example': {
                 'error': {
-                    'name': 'not_found',
+                    'name': 'NOT_FOUND',
                     'text': 'Could not find claim at "@olds/bad_example".',
                 }
             }
@@ -88,7 +88,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
         self.assertEqual(
             response['lbry://@spam/hovercraft'],
             {'error': {
-                'name': 'not_found',
+                'name': 'NOT_FOUND',
                 'text': 'Could not find claim at "lbry://@spam/hovercraft".'
             }}
         )

@@ -192,7 +192,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
         self.assertEqual(
             response[uri],
             {'error': {
-                'name': 'not_found',
+                'name': 'NOT_FOUND',
                 'text': f'Could not find claim at "{uri}".'
             }}
         )