2019-07-12 07:38:03 +02:00
|
|
|
import time
|
2019-07-11 19:29:26 +02:00
|
|
|
import struct
|
2019-12-08 00:13:13 +01:00
|
|
|
import apsw
|
2019-07-16 18:26:28 +02:00
|
|
|
import logging
|
2019-09-24 17:53:23 +02:00
|
|
|
from operator import itemgetter
|
2019-07-13 06:34:40 +02:00
|
|
|
from typing import Tuple, List, Dict, Union, Type, Optional
|
2020-02-08 00:50:29 +01:00
|
|
|
from binascii import unhexlify
|
2019-07-11 19:29:26 +02:00
|
|
|
from decimal import Decimal
|
|
|
|
from contextvars import ContextVar
|
2019-07-13 06:34:40 +02:00
|
|
|
from functools import wraps
|
2020-02-08 00:50:29 +01:00
|
|
|
from itertools import chain
|
2019-07-13 06:34:40 +02:00
|
|
|
from dataclasses import dataclass
|
2019-07-11 19:29:26 +02:00
|
|
|
|
2020-01-03 04:18:49 +01:00
|
|
|
from lbry.wallet.database import query, interpolate
|
2020-02-01 18:53:39 +01:00
|
|
|
from lbry.error import ResolveCensoredError
|
2019-07-11 19:29:26 +02:00
|
|
|
from lbry.schema.url import URL, normalize_name
|
|
|
|
from lbry.schema.tags import clean_tags
|
2020-01-10 16:47:57 +01:00
|
|
|
from lbry.schema.result import Outputs, Censor
|
2020-04-28 18:26:17 +02:00
|
|
|
from lbry.blockchain.ledger import Ledger, RegTestLedger
|
2019-07-11 19:29:26 +02:00
|
|
|
|
2020-05-01 15:29:44 +02:00
|
|
|
from .constants import CLAIM_TYPES, STREAM_TYPES
|
2019-11-14 20:31:49 +01:00
|
|
|
from .full_text_search import FTS_ORDER_BY
|
2019-07-11 19:29:26 +02:00
|
|
|
|
|
|
|
|
2019-12-08 00:13:13 +01:00
|
|
|
class SQLiteOperationalError(apsw.Error):
    """Raised when a sqlite query fails; carries the metrics collected so far."""

    def __init__(self, metrics):
        # keep the timing data gathered up to the failure for diagnostics
        self.metrics = metrics
        super().__init__('sqlite query errored')
|
|
|
|
|
|
|
|
|
2019-12-08 00:13:13 +01:00
|
|
|
class SQLiteInterruptedError(apsw.InterruptError):
    """Raised when a sqlite query is interrupted (e.g. by the query timeout)."""

    def __init__(self, metrics):
        # keep the timing data gathered up to the interruption for diagnostics
        self.metrics = metrics
        super().__init__('sqlite query interrupted')
|
|
|
|
|
|
|
|
|
2019-07-11 19:29:26 +02:00
|
|
|
|
|
|
|
|
2019-07-13 06:34:40 +02:00
|
|
|
@dataclass
class ReaderState:
    """Per-reader state: the database handle, metrics bookkeeping and censor maps."""

    db: apsw.Connection                # read-only sqlite connection
    stack: List[List]                  # nesting stack used by the measure() decorator
    metrics: Dict                      # accumulated timing data, keyed by function name
    is_tracking_metrics: bool          # when False, measurement decorators are no-ops
    ledger: Type[Ledger]               # Ledger for mainnet, RegTestLedger otherwise
    query_timeout: float               # seconds before a running query is interrupted
    log: logging.Logger
    blocked_streams: Dict              # censor maps used for resolve (blocked) and
    blocked_channels: Dict             # search (filtered) results
    filtered_streams: Dict
    filtered_channels: Dict

    def close(self):
        """Close the underlying sqlite connection."""
        self.db.close()

    def reset_metrics(self):
        """Discard all accumulated timing data."""
        self.stack, self.metrics = [], {}

    def set_query_timeout(self):
        """Interrupt the current query once `query_timeout` seconds have passed."""
        deadline = time.perf_counter() + self.query_timeout

        def interruptor():
            # called by sqlite every 100 VM instructions; interrupt past deadline
            if time.perf_counter() >= deadline:
                self.db.interrupt()
            return

        self.db.setprogresshandler(interruptor, 100)

    def get_resolve_censor(self) -> Censor:
        """Censor backed by the blocking maps (applied to resolve results)."""
        return Censor(self.blocked_streams, self.blocked_channels)

    def get_search_censor(self) -> Censor:
        """Censor backed by the filtering maps (applied to search results)."""
        return Censor(self.filtered_streams, self.filtered_channels)
|
|
|
|
|
2019-07-11 19:29:26 +02:00
|
|
|
|
2019-07-13 06:34:40 +02:00
|
|
|
# Holds the current ReaderState for this execution context; set by
# initializer() and cleared (set to None) by cleanup().
ctx: ContextVar[Optional[ReaderState]] = ContextVar('ctx')
|
|
|
|
|
|
|
|
|
2020-01-10 16:47:57 +01:00
|
|
|
def row_factory(cursor, row):
    """Map a sqlite result row to a {column_name: value} dict.

    The 'tags' column is stored as a comma-separated string and is
    converted into a set of tag strings; all other values pass through.
    """
    columns = {}
    for position, description in enumerate(cursor.getdescription()):
        name = description[0]
        value = row[position]
        if name == 'tags':
            value = set(value.split(','))
        columns[name] = value
    return columns
|
|
|
|
|
|
|
|
|
2020-01-22 07:55:37 +01:00
|
|
|
def initializer(log, _path, _ledger_name, query_timeout, _measure=False, block_and_filter=None):
    """Set up the per-process reader context.

    Opens the claims database read-only, installs the dict row factory and
    stores a fully populated ReaderState in the `ctx` context variable.

    Args:
        log: logger to attach to the reader state.
        _path: sqlite database path (URI form accepted).
        _ledger_name: 'mainnet' selects Ledger; anything else RegTestLedger.
        query_timeout: seconds before a running query is interrupted.
        _measure: when True, decorated queries collect timing metrics.
        block_and_filter: optional 4-tuple of (blocked_streams,
            blocked_channels, filtered_streams, filtered_channels) dicts.
    """
    db = apsw.Connection(_path, flags=apsw.SQLITE_OPEN_READONLY | apsw.SQLITE_OPEN_URI)
    db.setrowtrace(row_factory)
    if block_and_filter:
        blocked_streams, blocked_channels, filtered_streams, filtered_channels = block_and_filter
    else:
        # Use distinct dicts for each default: chained assignment
        # (`a = b = c = d = {}`) would alias all four censor maps to the
        # same object, so a later mutation of any one of them would
        # silently leak into the other three.
        blocked_streams, blocked_channels = {}, {}
        filtered_streams, filtered_channels = {}, {}
    ctx.set(
        ReaderState(
            db=db, stack=[], metrics={}, is_tracking_metrics=_measure,
            ledger=Ledger if _ledger_name == 'mainnet' else RegTestLedger,
            query_timeout=query_timeout, log=log,
            blocked_streams=blocked_streams, blocked_channels=blocked_channels,
            filtered_streams=filtered_streams, filtered_channels=filtered_channels,
        )
    )
|
2019-07-11 19:29:26 +02:00
|
|
|
|
|
|
|
|
2019-07-13 06:34:40 +02:00
|
|
|
def cleanup():
    """Tear down the reader context created by initializer().

    Closes the database connection held by the current ReaderState and
    clears the context variable.
    """
    state = ctx.get()
    state.close()
    ctx.set(None)
|
|
|
|
|
|
|
|
|
|
|
|
def measure(func):
    """Decorator recording per-call timing into the current ReaderState.

    When metrics tracking is on, each call appends a
    {'total': ms, 'isolated': ms} entry under the function's name;
    'isolated' excludes time spent inside nested measured calls.
    When tracking is off the wrapped function is called directly.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        state = ctx.get()
        if not state.is_tracking_metrics:
            return func(*args, **kwargs)
        call_metric = {}
        state.metrics.setdefault(func.__name__, []).append(call_metric)
        state.stack.append([])  # nested measured calls report their time here
        started = time.perf_counter()
        try:
            return func(*args, **kwargs)
        finally:
            total_ms = int((time.perf_counter() - started) * 1000)
            children_ms = sum(state.stack.pop())
            call_metric['total'] = total_ms
            call_metric['isolated'] = total_ms - children_ms
            if state.stack:
                # report our elapsed time to the enclosing measured call
                state.stack[-1].append(total_ms)
    return wrapper
|
|
|
|
|
|
|
|
|
|
|
|
def reports_metrics(func):
    """Decorator returning (result, metrics) when metrics tracking is on.

    Resets the state's metrics before the call so the returned dict
    covers only this invocation; with tracking off, the wrapped function's
    result is returned unchanged.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        state = ctx.get()
        if not state.is_tracking_metrics:
            return func(*args, **kwargs)
        state.reset_metrics()
        result = func(*args, **kwargs)
        return result, state.metrics
    return wrapper
|
|
|
|
|
|
|
|
|