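"""Resolve LBRY URLs to claims stored in the database."""
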
import logging
import itertools
from typing import List, Dict
from lbry.schema.url import URL
from lbry.schema.result import Outputs as ResultOutput
from lbry.error import ResolveCensoredError
from lbry.blockchain.transaction import Output
from . import rows_to_txos
from ..query_context import context
from .search import select_claims
log = logging.getLogger(__name__)


def resolve_claims(**constraints):
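    """
    Search for claims matching ``constraints`` and return the matching
    txos along with the censor applied to the query.
    """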
    censor = context().get_resolve_censor()
    rows = context().fetchall(select_claims(**constraints))
    return rows_to_txos(rows), censor


def _get_referenced_rows(txo_rows: List[Output], censor_channels: List[bytes]):
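    """
    Collect the channel and repost claims referenced by ``txo_rows``,
    plus any censoring channels, so clients can inflate resolved
    results locally.
    """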
    repost_hashes = set(txo.reposted_claim.claim_hash for txo in txo_rows if txo.reposted_claim)
    channel_hashes = set(itertools.chain(
        (txo.channel.claim_hash for txo in txo_rows if txo.channel),
        censor_channels
    ))
    reposted_txos = []
    if repost_hashes:
        # resolve_claims() returns a (txos, censor) pair; keep just the txos
        reposted_txos = resolve_claims(**{'claim.claim_hash__in': repost_hashes})
        if reposted_txos:
            reposted_txos = reposted_txos[0]
            # reposted claims may live in channels not collected above
            channel_hashes |= set(txo.channel.claim_hash for txo in reposted_txos if txo.channel)
    channel_txos = []
    if channel_hashes:
        # again, drop the censor and keep only the resolved txos
        channel_txos = resolve_claims(**{'claim.claim_hash__in': channel_hashes})
        channel_txos = channel_txos[0] if channel_txos else []

    # channels must come first for client side inflation to work properly
    return channel_txos + reposted_txos


def protobuf_resolve(urls, **kwargs) -> str:
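    """
    Resolve ``urls`` and serialize the results, including any lookup or
    censorship errors, to a base64-encoded protobuf Outputs message.
    """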
    txo_rows = [resolve_url(raw_url) for raw_url in urls]
    extra_txo_rows = _get_referenced_rows(
        [txo_row for txo_row in txo_rows if isinstance(txo_row, Output)],
        [txo.censor_hash for txo in txo_rows if isinstance(txo, ResolveCensoredError)]
    )
    return ResultOutput.to_base64(txo_rows, extra_txo_rows)


def resolve(urls, **kwargs) -> Dict[str, Output]:
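    """
    Resolve each URL to its claim.

    Errors (e.g. LookupError, ResolveCensoredError) are returned as values
    keyed by the URL that produced them, rather than raised.
    """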
    return {url: resolve_url(url) for url in urls}


def resolve_url(raw_url):
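    """
    Resolve a single LBRY URL (e.g. "lbry://@channel/stream") to a claim.

    Returns an Output on success; on failure, returns the error instance
    (ValueError, ResolveCensoredError or LookupError) instead of raising,
    so callers can report failures alongside successful results.
    """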
    try:
        url = URL.parse(raw_url)
    except ValueError as e:
        # an unparseable URL is reported back to the caller, not raised
        return e

    channel = None

    if url.has_channel:
        q = url.channel.to_dict()
        if set(q) == {'name'}:
            # bare channel name: pick the controlling (winning) claim
            q['is_controlling'] = True
        else:
            # otherwise the oldest matching claim wins
            q['order_by'] = ['^creation_height']
        matches, censor = resolve_claims(**q, limit=1)
        if matches:
            channel = matches[0]
        elif censor.censored:
            return ResolveCensoredError(raw_url, next(iter(censor.censored)))
        else:
            return LookupError(f'Could not find channel in "{raw_url}".')
    if url.has_stream:
        q = url.stream.to_dict()
        if channel is not None:
            # limit the search to validly signed streams in the resolved channel
            q['order_by'] = ['^creation_height']
            q['channel_hash'] = channel.claim_hash
            q['is_signature_valid'] = True
        elif set(q) == {'name'}:
            # bare stream name: pick the controlling (winning) claim
            q['is_controlling'] = True
        matches, censor = resolve_claims(**q, limit=1)
        if matches:
            stream = matches[0]
            stream.channel = channel
            return stream
        elif censor.censored:
            return ResolveCensoredError(raw_url, next(iter(censor.censored)))
        else:
            return LookupError(f'Could not find claim at "{raw_url}".')

    return channel