remove logger from reader context

This commit is contained in:
Jack Robison 2019-11-01 13:32:13 -04:00
parent 9b3f7e133b
commit 3c4574f11f
No known key found for this signature in database
GPG key ID: DF25C68FE0239BB2
5 changed files with 6 additions and 10 deletions

View file

@@ -1,7 +1,6 @@
 import time
 import struct
 import sqlite3
-import logging
 from operator import itemgetter
 from typing import Tuple, List, Dict, Union, Type, Optional
 from binascii import unhexlify
@@ -74,7 +73,6 @@ class ReaderState:
     is_tracking_metrics: bool
     ledger: Type[BaseLedger]
     query_timeout: float
-    log: logging.Logger
 
     def close(self):
         self.db.close()
@@ -97,14 +95,14 @@ class ReaderState:
 
 ctx: ContextVar[Optional[ReaderState]] = ContextVar('ctx')
 
 
-def initializer(log, _path, _ledger_name, query_timeout, _measure=False):
+def initializer(_path, _ledger_name, query_timeout, _measure=False):
     db = sqlite3.connect(_path, isolation_level=None, uri=True)
     db.row_factory = sqlite3.Row
     ctx.set(
         ReaderState(
             db=db, stack=[], metrics={}, is_tracking_metrics=_measure,
             ledger=MainNetLedger if _ledger_name == 'mainnet' else RegTestLedger,
-            query_timeout=query_timeout, log=log
+            query_timeout=query_timeout
         )
     )
@@ -172,9 +170,7 @@ def execute_query(sql, values) -> List:
         if context.is_tracking_metrics:
             context.metrics['execute_query'][-1]['sql'] = plain_sql
         if str(err) == "interrupted":
-            context.log.warning("interrupted slow sqlite query:\n%s", plain_sql)
             raise SQLiteInterruptedError(context.metrics)
-        context.log.exception('failed running query', exc_info=err)
         raise SQLiteOperationalError(context.metrics)

View file

@@ -67,7 +67,7 @@ class LBRYSessionManager(SessionManager):
         path = os.path.join(self.env.db_dir, 'claims.db')
         args = dict(
             initializer=reader.initializer,
-            initargs=(self.logger, path, self.env.coin.NET, self.env.database_query_timeout,
+            initargs=(path, self.env.coin.NET, self.env.database_query_timeout,
                       self.env.track_metrics)
         )
         if self.env.max_query_workers is not None and self.env.max_query_workers == 0:

View file

@@ -112,7 +112,7 @@ async def search(executor, kwargs):
 
 
 async def main(db_path, max_query_time):
-    args = dict(initializer=initializer, initargs=(log, db_path, MainNetLedger, 0.25))
+    args = dict(initializer=initializer, initargs=(db_path, MainNetLedger, 0.25))
     workers = max(os.cpu_count(), 4)
     log.info(f"using {workers} reader processes")
     query_executor = ProcessPoolExecutor(workers, **args)

View file

@@ -45,7 +45,7 @@ async def run_times(executor, iterations, show=True):
 
 async def main():
     executor = ProcessPoolExecutor(
-        4, initializer=reader.initializer, initargs=(log, db_path, 'mainnet', 1.0, True)
+        4, initializer=reader.initializer, initargs=(db_path, 'mainnet', 1.0, True)
     )
     #await run_times(executor, 4, show=False)
     #await run_times(executor, 1)

View file

@@ -46,7 +46,7 @@ class TestSQLDB(unittest.TestCase):
         db_url = 'file:test_sqldb?mode=memory&cache=shared'
         self.sql = writer.SQLDB(self, db_url)
         self.addCleanup(self.sql.close)
-        reader.initializer(logging.getLogger(__name__), db_url, 'regtest', self.query_timeout)
+        reader.initializer(db_url, 'regtest', self.query_timeout)
         self.addCleanup(reader.cleanup)
         self.timer = Timer('BlockProcessor')
         self.sql.open()