Merge pull request #2776 from lbryio/resolve_includes_censoring_channels_url

`resolve` results which are censored now include full details of the channel which did the censoring
Lex Berezhny 2020-02-08 23:32:44 -05:00 committed by GitHub
commit e834209d40
11 changed files with 141 additions and 68 deletions
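
For orientation before the diffs: the change reworks the `blocked` summary returned by `claim_search` and the error payload returned by `resolve` for censored URLs. The sketch below is illustrative only, pieced together from the `inflate_blocked`/`message_to_txo` changes and the test assertions further down; the URLs, counts, and the elided claim id are placeholder values, not output from the code.

```python
# Illustrative response shapes (placeholder values), inferred from the diffs below.

# claim_search: `blocked.channels` is now a list of {channel, blocked} entries,
# where `channel` is the fully resolved censoring-channel output, rather than a
# bare {channel_hash: count} mapping.
claim_search_blocked = {
    "total": 1,
    "channels": [{
        "channel": {"short_url": "lbry://@filtering#1"},  # resolved channel txo
        "blocked": 1,                                      # claims it censored
    }],
}

# resolve: a censored URL comes back as a BLOCKED error that now carries the
# censoring channel's details under `censor` instead of only its claim hash.
censored_resolve_result = {
    "lbry://@some_channel/bad_content": {
        "error": {
            "name": "BLOCKED",
            "text": "Resolve of 'lbry://@some_channel/bad_content' was censored "
                    "by channel with claim id '...'.",
            "censor": {"short_url": "lbry://@blocking#1"},  # full channel details
        },
    },
}
```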

View file

@@ -53,7 +53,7 @@ Code | Name | Message
 407 | DataDownload | Failed to download blob. *generic*
 410 | Resolve | Failed to resolve '{url}'.
 411 | ResolveTimeout | Failed to resolve '{url}' within the timeout.
-411 | ResolveCensored | Resolve of '{url}' was censored by channel with claim id '{censor_id}'.
+411 | ResolveCensored | Resolve of '{url}' was censored by channel with claim id '{claim_id(censor_hash)}'.
 420 | KeyFeeAboveMaxAllowed | {message}
 421 | InvalidPassword | Password is invalid.
 422 | IncompatibleWalletServer | '{server}:{port}' has an incompatibly old version.

View file

@@ -1,4 +1,4 @@
-from .base import BaseError
+from .base import BaseError, claim_id
 class UserInputError(BaseError):
@@ -16,18 +16,22 @@ class CommandError(UserInputError):
 class CommandDoesNotExistError(CommandError):
     def __init__(self, command):
+        self.command = command
         super().__init__(f"Command '{command}' does not exist.")
 class CommandDeprecatedError(CommandError):
     def __init__(self, command):
+        self.command = command
         super().__init__(f"Command '{command}' is deprecated.")
 class CommandInvalidArgumentError(CommandError):
     def __init__(self, argument, command):
+        self.argument = argument
+        self.command = command
         super().__init__(f"Invalid argument '{argument}' to command '{command}'.")
@@ -37,6 +41,7 @@ class CommandTemporarilyUnavailableError(CommandError):
     """
     def __init__(self, command):
+        self.command = command
         super().__init__(f"Command '{command}' is temporarily unavailable.")
@@ -46,6 +51,7 @@ class CommandPermanentlyUnavailableError(CommandError):
     """
     def __init__(self, command):
+        self.command = command
         super().__init__(f"Command '{command}' is permanently unavailable.")
@@ -58,12 +64,15 @@ class InputValueError(UserInputError, ValueError):
 class GenericInputValueError(InputValueError):
     def __init__(self, value, argument):
+        self.value = value
+        self.argument = argument
         super().__init__(f"The value '{value}' for argument '{argument}' is not valid.")
 class InputValueIsNoneError(InputValueError):
     def __init__(self, argument):
+        self.argument = argument
         super().__init__(f"None or null is not valid value for argument '{argument}'.")
@@ -79,6 +88,7 @@ class ConfigWriteError(ConfigurationError):
     """
    def __init__(self, path):
+        self.path = path
         super().__init__(f"Cannot write configuration file '{path}'.")
@@ -88,6 +98,7 @@ class ConfigReadError(ConfigurationError):
     """
     def __init__(self, path):
+        self.path = path
         super().__init__(f"Cannot find provided configuration file '{path}'.")
@@ -97,18 +108,21 @@ class ConfigParseError(ConfigurationError):
     """
     def __init__(self, path):
+        self.path = path
         super().__init__(f"Failed to parse the configuration file '{path}'.")
 class ConfigMissingError(ConfigurationError):
     def __init__(self, path):
+        self.path = path
         super().__init__(f"Configuration file '{path}' is missing setting that has no default / fallback.")
 class ConfigInvalidError(ConfigurationError):
     def __init__(self, path):
+        self.path = path
         super().__init__(f"Configuration file '{path}' has setting with invalid value.")
@@ -188,24 +202,29 @@ class DataDownloadError(WalletError):
 class ResolveError(WalletError):
     def __init__(self, url):
+        self.url = url
         super().__init__(f"Failed to resolve '{url}'.")
 class ResolveTimeoutError(WalletError):
     def __init__(self, url):
+        self.url = url
         super().__init__(f"Failed to resolve '{url}' within the timeout.")
 class ResolveCensoredError(WalletError):
-    def __init__(self, url, censor_id):
-        super().__init__(f"Resolve of '{url}' was censored by channel with claim id '{censor_id}'.")
+    def __init__(self, url, censor_hash):
+        self.url = url
+        self.censor_hash = censor_hash
+        super().__init__(f"Resolve of '{url}' was censored by channel with claim id '{claim_id(censor_hash)}'.")
 class KeyFeeAboveMaxAllowedError(WalletError):
     def __init__(self, message):
+        self.message = message
         super().__init__(f"{message}")
@@ -218,6 +237,8 @@ class InvalidPasswordError(WalletError):
 class IncompatibleWalletServerError(WalletError):
     def __init__(self, server, port):
+        self.server = server
+        self.port = port
         super().__init__(f"'{server}:{port}' has an incompatibly old version.")
@@ -278,30 +299,35 @@ class DownloadCancelledError(BlobError):
 class DownloadSDTimeoutError(BlobError):
     def __init__(self, download):
+        self.download = download
         super().__init__(f"Failed to download sd blob {download} within timeout.")
 class DownloadDataTimeoutError(BlobError):
     def __init__(self, download):
+        self.download = download
         super().__init__(f"Failed to download data blobs for sd hash {download} within timeout.")
 class InvalidStreamDescriptorError(BlobError):
     def __init__(self, message):
+        self.message = message
         super().__init__(f"{message}")
 class InvalidDataError(BlobError):
     def __init__(self, message):
+        self.message = message
         super().__init__(f"{message}")
 class InvalidBlobHashError(BlobError):
     def __init__(self, message):
+        self.message = message
         super().__init__(f"{message}")
@@ -314,12 +340,14 @@ class ComponentError(BaseError):
 class ComponentStartConditionNotMetError(ComponentError):
     def __init__(self, components):
+        self.components = components
         super().__init__(f"Unresolved dependencies for: {components}")
 class ComponentsNotStartedError(ComponentError):
     def __init__(self, message):
+        self.message = message
         super().__init__(f"{message}")
@@ -332,16 +360,20 @@ class CurrencyExchangeError(BaseError):
 class InvalidExchangeRateResponseError(CurrencyExchangeError):
     def __init__(self, source, reason):
+        self.source = source
+        self.reason = reason
         super().__init__(f"Failed to get exchange rate from {source}: {reason}")
 class CurrencyConversionError(CurrencyExchangeError):
     def __init__(self, message):
+        self.message = message
         super().__init__(f"{message}")
 class InvalidCurrencyError(CurrencyExchangeError):
     def __init__(self, currency):
+        self.currency = currency
         super().__init__(f"Invalid currency: {currency} is not a supported currency.")

View file

@@ -1,2 +1,9 @@
+from binascii import hexlify
+def claim_id(claim_hash):
+    return hexlify(claim_hash[::-1]).decode()
 class BaseError(Exception):
     pass

View file

@@ -13,10 +13,12 @@ class {name}({parents}):{doc}
 """
 INIT = """
-    def __init__({args}):
+    def __init__({args}):{fields}
         super().__init__({format}"{message}")
 """
+FUNCTIONS = ['claim_id']
 class ErrorClass:
@@ -50,10 +52,20 @@ class ErrorClass:
     def get_arguments(self):
         args = ['self']
-        for arg in re.findall('{([a-z0-1_]+)}', self.message):
+        for arg in re.findall('{([a-z0-1_()]+)}', self.message):
+            for func in FUNCTIONS:
+                if arg.startswith(f'{func}('):
+                    arg = arg[len(f'{func}('):-1]
+                    break
             args.append(arg)
         return args
+    @staticmethod
+    def get_fields(args):
+        if len(args) > 1:
+            return f''.join(f'\n{INDENT*2}self.{field} = {field}' for field in args[1:])
+        return ''
     @staticmethod
     def get_doc_string(doc):
         if doc:
@@ -69,7 +81,8 @@ class ErrorClass:
         args = self.get_arguments()
         if self.is_leaf:
             out.write((CLASS + INIT).format(
-                name=self.class_name, parents=', '.join(parents), args=', '.join(args),
+                name=self.class_name, parents=', '.join(parents),
+                args=', '.join(args), fields=self.get_fields(args),
                 message=self.message, doc=self.get_doc_string(self.comment), format='f' if len(args) > 1 else ''
             ))
         else:
@@ -102,7 +115,7 @@ def find_parent(stack, child):
 def generate(out):
-    out.write('from .base import BaseError\n')
+    out.write(f"from .base import BaseError, {', '.join(FUNCTIONS)}\n")
     stack = {}
     for error in get_errors():
         error.render(out, find_parent(stack, error))

View file

@@ -13,6 +13,16 @@ NOT_FOUND = ErrorMessage.Code.Name(ErrorMessage.NOT_FOUND)
 BLOCKED = ErrorMessage.Code.Name(ErrorMessage.BLOCKED)
+def set_reference(reference, claim_hash, rows):
+    if claim_hash:
+        for txo in rows:
+            if claim_hash == txo['claim_hash']:
+                reference.tx_hash = txo['txo_hash'][:32]
+                reference.nout = struct.unpack('<I', txo['txo_hash'][32:])[0]
+                reference.height = txo['height']
+                return
 class Censor:
     __slots__ = 'streams', 'channels', 'censored', 'total'
@@ -39,12 +49,12 @@ class Censor:
         self.total += 1
         return was_censored
-    def to_message(self, outputs: OutputsMessage):
+    def to_message(self, outputs: OutputsMessage, extra_txo_rows):
         outputs.blocked_total = self.total
         for censoring_channel_hash, count in self.censored.items():
-            block = outputs.blocked.add()
-            block.count = count
-            block.channel_hash = censoring_channel_hash
+            blocked = outputs.blocked.add()
+            blocked.count = count
+            set_reference(blocked.channel, censoring_channel_hash, extra_txo_rows)
 class Outputs:
@@ -66,26 +76,35 @@ class Outputs:
         for txo_message in self.extra_txos:
             self.message_to_txo(txo_message, tx_map)
         txos = [self.message_to_txo(txo_message, tx_map) for txo_message in self.txos]
-        return txos, self.inflate_blocked()
+        return txos, self.inflate_blocked(tx_map)
-    def inflate_blocked(self):
+    def inflate_blocked(self, tx_map):
         return {
             "total": self.blocked_total,
-            "channels": {
-                hexlify(message.channel_hash[::-1]).decode(): message.count
-                for message in self.blocked
-            }
+            "channels": [{
+                'channel': self.message_to_txo(blocked.channel, tx_map),
+                'blocked': blocked.count
+            } for blocked in self.blocked]
         }
     def message_to_txo(self, txo_message, tx_map):
         if txo_message.WhichOneof('meta') == 'error':
-            return {
+            error = {
                 'error': {
-                    'name': txo_message.error.Code.Name(txo_message.error.code).lower(),
+                    'name': txo_message.error.Code.Name(txo_message.error.code),
                     'text': txo_message.error.text,
                 }
             }
-        txo = tx_map[txo_message.tx_hash].outputs[txo_message.nout]
+            if error['error']['name'] == BLOCKED:
+                error['error']['censor'] = self.message_to_txo(
+                    txo_message.error.blocked.channel, tx_map
+                )
+            return error
+        tx = tx_map.get(txo_message.tx_hash)
+        if not tx:
+            return
+        txo = tx.outputs[txo_message.nout]
         if txo_message.WhichOneof('meta') == 'claim':
             claim = txo_message.claim
             txo.meta = {
@@ -145,7 +164,7 @@ class Outputs:
         if total is not None:
             page.total = total
         if blocked is not None:
-            blocked.to_message(page)
+            blocked.to_message(page, extra_txo_rows)
         for row in txo_rows:
             cls.row_to_message(row, page.txos.add(), extra_txo_rows)
         for row in extra_txo_rows:
@@ -162,6 +181,7 @@ class Outputs:
             txo_message.error.code = ErrorMessage.NOT_FOUND
         elif isinstance(txo, ResolveCensoredError):
             txo_message.error.code = ErrorMessage.BLOCKED
+            set_reference(txo_message.error.blocked.channel, txo.censor_hash, extra_txo_rows)
             return
         txo_message.tx_hash = txo['txo_hash'][:32]
         txo_message.nout, = struct.unpack('<I', txo['txo_hash'][32:])
@@ -184,20 +204,5 @@ class Outputs:
         txo_message.claim.trending_mixed = txo['trending_mixed']
         txo_message.claim.trending_local = txo['trending_local']
         txo_message.claim.trending_global = txo['trending_global']
-        cls.set_reference(txo_message, 'channel', txo['channel_hash'], extra_txo_rows)
-        cls.set_reference(txo_message, 'repost', txo['reposted_claim_hash'], extra_txo_rows)
-    @staticmethod
-    def set_blocked(message, blocked):
-        message.blocked_total = blocked.total
-    @staticmethod
-    def set_reference(message, attr, claim_hash, rows):
-        if claim_hash:
-            for txo in rows:
-                if claim_hash == txo['claim_hash']:
-                    reference = getattr(message.claim, attr)
-                    reference.tx_hash = txo['txo_hash'][:32]
-                    reference.nout = struct.unpack('<I', txo['txo_hash'][32:])[0]
-                    reference.height = txo['height']
-                    break
+        set_reference(txo_message.claim.channel, txo['channel_hash'], extra_txo_rows)
+        set_reference(txo_message.claim.repost, txo['reposted_claim_hash'], extra_txo_rows)

View file

@@ -19,7 +19,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
   name='result.proto',
   package='pb',
   syntax='proto3',
-  serialized_pb=_b('\n\x0cresult.proto\x12\x02pb\"\x97\x01\n\x07Outputs\x12\x18\n\x04txos\x18\x01 \x03(\x0b\x32\n.pb.Output\x12\x1e\n\nextra_txos\x18\x02 \x03(\x0b\x32\n.pb.Output\x12\r\n\x05total\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\x12\x1c\n\x07\x62locked\x18\x05 \x03(\x0b\x32\x0b.pb.Blocked\x12\x15\n\rblocked_total\x18\x06 \x01(\r\"{\n\x06Output\x12\x0f\n\x07tx_hash\x18\x01 \x01(\x0c\x12\x0c\n\x04nout\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x1e\n\x05\x63laim\x18\x07 \x01(\x0b\x32\r.pb.ClaimMetaH\x00\x12\x1a\n\x05\x65rror\x18\x0f \x01(\x0b\x32\t.pb.ErrorH\x00\x42\x06\n\x04meta\"\xaf\x03\n\tClaimMeta\x12\x1b\n\x07\x63hannel\x18\x01 \x01(\x0b\x32\n.pb.Output\x12\x1a\n\x06repost\x18\x02 \x01(\x0b\x32\n.pb.Output\x12\x11\n\tshort_url\x18\x03 \x01(\t\x12\x15\n\rcanonical_url\x18\x04 \x01(\t\x12\x16\n\x0eis_controlling\x18\x05 \x01(\x08\x12\x18\n\x10take_over_height\x18\x06 \x01(\r\x12\x17\n\x0f\x63reation_height\x18\x07 \x01(\r\x12\x19\n\x11\x61\x63tivation_height\x18\x08 \x01(\r\x12\x19\n\x11\x65xpiration_height\x18\t \x01(\r\x12\x19\n\x11\x63laims_in_channel\x18\n \x01(\r\x12\x10\n\x08reposted\x18\x0b \x01(\r\x12\x18\n\x10\x65\x66\x66\x65\x63tive_amount\x18\x14 \x01(\x04\x12\x16\n\x0esupport_amount\x18\x15 \x01(\x04\x12\x16\n\x0etrending_group\x18\x16 \x01(\r\x12\x16\n\x0etrending_mixed\x18\x17 \x01(\x02\x12\x16\n\x0etrending_local\x18\x18 \x01(\x02\x12\x17\n\x0ftrending_global\x18\x19 \x01(\x02\"\x94\x01\n\x05\x45rror\x12\x1c\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x0e.pb.Error.Code\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x1c\n\x07\x62locked\x18\x03 \x01(\x0b\x32\x0b.pb.Blocked\"A\n\x04\x43ode\x12\x10\n\x0cUNKNOWN_CODE\x10\x00\x12\r\n\tNOT_FOUND\x10\x01\x12\x0b\n\x07INVALID\x10\x02\x12\x0b\n\x07\x42LOCKED\x10\x03\".\n\x07\x42locked\x12\r\n\x05\x63ount\x18\x01 \x01(\r\x12\x14\n\x0c\x63hannel_hash\x18\x02 \x01(\x0c\x62\x06proto3')
+  serialized_pb=_b('\n\x0cresult.proto\x12\x02pb\"\x97\x01\n\x07Outputs\x12\x18\n\x04txos\x18\x01 \x03(\x0b\x32\n.pb.Output\x12\x1e\n\nextra_txos\x18\x02 \x03(\x0b\x32\n.pb.Output\x12\r\n\x05total\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\x12\x1c\n\x07\x62locked\x18\x05 \x03(\x0b\x32\x0b.pb.Blocked\x12\x15\n\rblocked_total\x18\x06 \x01(\r\"{\n\x06Output\x12\x0f\n\x07tx_hash\x18\x01 \x01(\x0c\x12\x0c\n\x04nout\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x1e\n\x05\x63laim\x18\x07 \x01(\x0b\x32\r.pb.ClaimMetaH\x00\x12\x1a\n\x05\x65rror\x18\x0f \x01(\x0b\x32\t.pb.ErrorH\x00\x42\x06\n\x04meta\"\xaf\x03\n\tClaimMeta\x12\x1b\n\x07\x63hannel\x18\x01 \x01(\x0b\x32\n.pb.Output\x12\x1a\n\x06repost\x18\x02 \x01(\x0b\x32\n.pb.Output\x12\x11\n\tshort_url\x18\x03 \x01(\t\x12\x15\n\rcanonical_url\x18\x04 \x01(\t\x12\x16\n\x0eis_controlling\x18\x05 \x01(\x08\x12\x18\n\x10take_over_height\x18\x06 \x01(\r\x12\x17\n\x0f\x63reation_height\x18\x07 \x01(\r\x12\x19\n\x11\x61\x63tivation_height\x18\x08 \x01(\r\x12\x19\n\x11\x65xpiration_height\x18\t \x01(\r\x12\x19\n\x11\x63laims_in_channel\x18\n \x01(\r\x12\x10\n\x08reposted\x18\x0b \x01(\r\x12\x18\n\x10\x65\x66\x66\x65\x63tive_amount\x18\x14 \x01(\x04\x12\x16\n\x0esupport_amount\x18\x15 \x01(\x04\x12\x16\n\x0etrending_group\x18\x16 \x01(\r\x12\x16\n\x0etrending_mixed\x18\x17 \x01(\x02\x12\x16\n\x0etrending_local\x18\x18 \x01(\x02\x12\x17\n\x0ftrending_global\x18\x19 \x01(\x02\"\x94\x01\n\x05\x45rror\x12\x1c\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x0e.pb.Error.Code\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x1c\n\x07\x62locked\x18\x03 \x01(\x0b\x32\x0b.pb.Blocked\"A\n\x04\x43ode\x12\x10\n\x0cUNKNOWN_CODE\x10\x00\x12\r\n\tNOT_FOUND\x10\x01\x12\x0b\n\x07INVALID\x10\x02\x12\x0b\n\x07\x42LOCKED\x10\x03\"5\n\x07\x42locked\x12\r\n\x05\x63ount\x18\x01 \x01(\r\x12\x1b\n\x07\x63hannel\x18\x02 \x01(\x0b\x32\n.pb.Outputb\x06proto3')
 )
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
@@ -388,9 +388,9 @@ _BLOCKED = _descriptor.Descriptor(
       is_extension=False, extension_scope=None,
       options=None),
     _descriptor.FieldDescriptor(
-      name='channel_hash', full_name='pb.Blocked.channel_hash', index=1,
-      number=2, type=12, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b(""),
+      name='channel', full_name='pb.Blocked.channel', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       options=None),
@@ -407,7 +407,7 @@ _BLOCKED = _descriptor.Descriptor(
   oneofs=[
   ],
   serialized_start=884,
-  serialized_end=930,
+  serialized_end=937,
 )
 _OUTPUTS.fields_by_name['txos'].message_type = _OUTPUT
@@ -426,6 +426,7 @@ _CLAIMMETA.fields_by_name['repost'].message_type = _OUTPUT
 _ERROR.fields_by_name['code'].enum_type = _ERROR_CODE
 _ERROR.fields_by_name['blocked'].message_type = _BLOCKED
 _ERROR_CODE.containing_type = _ERROR
+_BLOCKED.fields_by_name['channel'].message_type = _OUTPUT
 DESCRIPTOR.message_types_by_name['Outputs'] = _OUTPUTS
 DESCRIPTOR.message_types_by_name['Output'] = _OUTPUT
 DESCRIPTOR.message_types_by_name['ClaimMeta'] = _CLAIMMETA

View file

@@ -4,10 +4,11 @@ import apsw
 import logging
 from operator import itemgetter
 from typing import Tuple, List, Dict, Union, Type, Optional
-from binascii import unhexlify, hexlify
+from binascii import unhexlify
 from decimal import Decimal
 from contextvars import ContextVar
 from functools import wraps
+from itertools import chain
 from dataclasses import dataclass
 from lbry.wallet.database import query, interpolate
@@ -388,9 +389,13 @@ def search_claims(censor: Censor, **constraints) -> List:
     )
-def _get_referenced_rows(censor: Censor, txo_rows: List[dict]):
+def _get_referenced_rows(txo_rows: List[dict], censor_channels: List[bytes]):
+    censor = ctx.get().get_resolve_censor()
     repost_hashes = set(filter(None, map(itemgetter('reposted_claim_hash'), txo_rows)))
-    channel_hashes = set(filter(None, map(itemgetter('channel_hash'), txo_rows)))
+    channel_hashes = set(chain(
+        filter(None, map(itemgetter('channel_hash'), txo_rows)),
+        censor_channels
+    ))
     reposted_txos = []
     if repost_hashes:
@@ -418,7 +423,7 @@ def search(constraints) -> Tuple[List, List, int, int, Censor]:
     context = ctx.get()
     search_censor = context.get_search_censor()
     txo_rows = search_claims(search_censor, **constraints)
-    extra_txo_rows = _get_referenced_rows(context.get_resolve_censor(), txo_rows)
+    extra_txo_rows = _get_referenced_rows(txo_rows, search_censor.censored.keys())
     return txo_rows, extra_txo_rows, constraints['offset'], total, search_censor
@@ -426,7 +431,8 @@ def search(constraints) -> Tuple[List, List, int, int, Censor]:
 def resolve(urls) -> Tuple[List, List]:
     txo_rows = [resolve_url(raw_url) for raw_url in urls]
     extra_txo_rows = _get_referenced_rows(
-        ctx.get().get_resolve_censor(), [r for r in txo_rows if isinstance(r, dict)]
+        [txo for txo in txo_rows if isinstance(txo, dict)],
+        [txo.censor_hash for txo in txo_rows if isinstance(txo, ResolveCensoredError)]
     )
     return txo_rows, extra_txo_rows
@@ -452,7 +458,7 @@ def resolve_url(raw_url):
         if matches:
             channel = matches[0]
         elif censor.censored:
-            return ResolveCensoredError(raw_url, hexlify(next(iter(censor.censored))[::-1]).decode())
+            return ResolveCensoredError(raw_url, next(iter(censor.censored)))
         else:
             return LookupError(f'Could not find channel in "{raw_url}".')
@@ -472,7 +478,7 @@ def resolve_url(raw_url):
         if matches:
             return matches[0]
         elif censor.censored:
-            return ResolveCensoredError(raw_url, hexlify(next(iter(censor.censored))[::-1]).decode())
+            return ResolveCensoredError(raw_url, next(iter(censor.censored)))
         else:
             return LookupError(f'Could not find claim at "{raw_url}".')

View file

@@ -241,10 +241,10 @@ class SQLDB:
         streams, channels = {}, {}
         if channel_hashes:
             sql = query(
-                "SELECT claim.channel_hash, claim.reposted_claim_hash, reposted.claim_type "
-                "FROM claim JOIN claim AS reposted ON (reposted.claim_hash=claim.reposted_claim_hash)", **{
-                    'claim.reposted_claim_hash__is_not_null': 1,
-                    'claim.channel_hash__in': channel_hashes
+                "SELECT repost.channel_hash, repost.reposted_claim_hash, target.claim_type "
+                "FROM claim as repost JOIN claim AS target ON (target.claim_hash=repost.reposted_claim_hash)", **{
+                    'repost.reposted_claim_hash__is_not_null': 1,
+                    'repost.channel_hash__in': channel_hashes
                 }
             )
             for blocked_claim in self.execute(*sql):

View file

@@ -405,10 +405,10 @@ class ClaimCommands(ClaimTestCase):
         await self.ledger.wait(channel_tx)
         r = await self.claim_list(resolve=True)
-        self.assertEqual('not_found', r[0]['meta']['error']['name'])
+        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
         self.assertTrue(r[1]['meta']['is_controlling'])
         r = await self.channel_list(resolve=True)
-        self.assertEqual('not_found', r[0]['meta']['error']['name'])
+        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
         self.assertTrue(r[1]['meta']['is_controlling'])
         # confirm it
@@ -430,10 +430,10 @@ class ClaimCommands(ClaimTestCase):
         await self.ledger.wait(stream_tx)
         r = await self.claim_list(resolve=True)
-        self.assertEqual('not_found', r[0]['meta']['error']['name'])
+        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
         self.assertTrue(r[1]['meta']['is_controlling'])
         r = await self.stream_list(resolve=True)
-        self.assertEqual('not_found', r[0]['meta']['error']['name'])
+        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
         self.assertTrue(r[1]['meta']['is_controlling'])
         # confirm it
@@ -845,18 +845,26 @@ class StreamCommands(ClaimTestCase):
         # search for blocked content directly
         result = await self.out(self.daemon.jsonrpc_claim_search(name='bad_content'))
+        blocked = result['blocked']
         self.assertEqual([], result['items'])
-        self.assertEqual({"channels": {filtering_channel_id: 1}, "total": 1}, result['blocked'])
+        self.assertEqual(1, blocked['total'])
+        self.assertEqual(1, len(blocked['channels']))
+        self.assertEqual(1, blocked['channels'][0]['blocked'])
+        self.assertTrue(blocked['channels'][0]['channel']['short_url'].startswith('lbry://@filtering#'))
         # search channel containing blocked content
         result = await self.out(self.daemon.jsonrpc_claim_search(channel='@some_channel'))
+        blocked = result['blocked']
         self.assertEqual(1, len(result['items']))
-        self.assertEqual({"channels": {filtering_channel_id: 1}, "total": 1}, result['blocked'])
+        self.assertEqual(1, blocked['total'])
+        self.assertEqual(1, len(blocked['channels']))
+        self.assertEqual(1, blocked['channels'][0]['blocked'])
+        self.assertTrue(blocked['channels'][0]['channel']['short_url'].startswith('lbry://@filtering#'))
         # content was filtered by not_tag before censoring
         result = await self.out(self.daemon.jsonrpc_claim_search(channel='@some_channel', not_tags=["good", "bad"]))
         self.assertEqual(0, len(result['items']))
-        self.assertEqual({"channels": {}, "total": 0}, result['blocked'])
+        self.assertEqual({"channels": [], "total": 0}, result['blocked'])
         blocking_channel_id = self.get_claim_id(
             await self.channel_create('@blocking', '1.0')
@@ -874,8 +882,9 @@ class StreamCommands(ClaimTestCase):
         # blocked content is not resolveable
         result = await self.out(self.daemon.jsonrpc_resolve('lbry://@some_channel/bad_content'))
         error = result['lbry://@some_channel/bad_content']['error']
-        self.assertTrue(error['name'], 'blocked')
+        self.assertEqual(error['name'], 'BLOCKED')
         self.assertTrue(error['text'].startswith("Resolve of 'lbry://@some_channel/bad_content' was censored"))
+        self.assertTrue(error['censor']['short_url'].startswith('lbry://@blocking#'))
     async def test_publish_updates_file_list(self):
         tx = await self.stream_create(title='created')

View file

@@ -15,7 +15,7 @@ class BaseResolveTestCase(CommandTestCase):
         other = (await self.resolve(name))[name]
         if claim_id is None:
             self.assertIn('error', other)
-            self.assertEqual(other['error']['name'], 'not_found')
+            self.assertEqual(other['error']['name'], 'NOT_FOUND')
         else:
             self.assertEqual(claim_id, other['claim_id'])
@@ -186,7 +186,7 @@ class ResolveCommand(BaseResolveTestCase):
         self.assertEqual(response, {
             'lbry://@abc/on-channel-claim': {
                 'error': {
-                    'name': 'not_found',
+                    'name': 'NOT_FOUND',
                     'text': 'Could not find claim at "lbry://@abc/on-channel-claim".',
                 }
             }
@@ -265,7 +265,7 @@ class ResolveCommand(BaseResolveTestCase):
         self.assertEqual(response, {
             '@olds/bad_example': {
                 'error': {
-                    'name': 'not_found',
+                    'name': 'NOT_FOUND',
                     'text': 'Could not find claim at "@olds/bad_example".',
                 }
             }

View file

@@ -88,7 +88,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
         self.assertEqual(
             response['lbry://@spam/hovercraft'],
             {'error': {
-                'name': 'not_found',
+                'name': 'NOT_FOUND',
                 'text': 'Could not find claim at "lbry://@spam/hovercraft".'
             }}
         )
@@ -192,7 +192,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
         self.assertEqual(
             response[uri],
             {'error': {
-                'name': 'not_found',
+                'name': 'NOT_FOUND',
                 'text': f'Could not find claim at "{uri}".'
            }}
         )