remove logger from reader context
parent 9b3f7e133b
commit 3c4574f11f
5 changed files with 6 additions and 10 deletions
@@ -1,7 +1,6 @@
 import time
 import struct
 import sqlite3
-import logging
 from operator import itemgetter
 from typing import Tuple, List, Dict, Union, Type, Optional
 from binascii import unhexlify
@@ -74,7 +73,6 @@ class ReaderState:
     is_tracking_metrics: bool
     ledger: Type[BaseLedger]
     query_timeout: float
-    log: logging.Logger

     def close(self):
         self.db.close()
@@ -97,14 +95,14 @@
 ctx: ContextVar[Optional[ReaderState]] = ContextVar('ctx')


-def initializer(log, _path, _ledger_name, query_timeout, _measure=False):
+def initializer(_path, _ledger_name, query_timeout, _measure=False):
     db = sqlite3.connect(_path, isolation_level=None, uri=True)
     db.row_factory = sqlite3.Row
     ctx.set(
         ReaderState(
             db=db, stack=[], metrics={}, is_tracking_metrics=_measure,
             ledger=MainNetLedger if _ledger_name == 'mainnet' else RegTestLedger,
-            query_timeout=query_timeout, log=log
+            query_timeout=query_timeout
         )
     )
@@ -172,9 +170,7 @@ def execute_query(sql, values) -> List:
         if context.is_tracking_metrics:
             context.metrics['execute_query'][-1]['sql'] = plain_sql
         if str(err) == "interrupted":
-            context.log.warning("interrupted slow sqlite query:\n%s", plain_sql)
             raise SQLiteInterruptedError(context.metrics)
-        context.log.exception('failed running query', exc_info=err)
         raise SQLiteOperationalError(context.metrics)

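With the logger gone from the reader context, a worker is initialized from the database path, ledger name, query timeout, and metrics flag alone. A minimal in-process sketch of the updated call, using an illustrative in-memory database URL (the `reader` import path is an assumption, not part of this commit):

    from lbry.wallet.server.db import reader  # assumed module path

    # illustrative values only
    db_url = 'file:example_claims?mode=memory&cache=shared'

    # new signature: (path, ledger_name, query_timeout, measure) -- no logger argument
    reader.initializer(db_url, 'regtest', query_timeout=1.0)
    state = reader.ctx.get()            # ReaderState for this process
    assert not hasattr(state, 'log')    # the log field no longer exists on ReaderState
    state.close()
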
@@ -67,7 +67,7 @@ class LBRYSessionManager(SessionManager):
         path = os.path.join(self.env.db_dir, 'claims.db')
         args = dict(
             initializer=reader.initializer,
-            initargs=(self.logger, path, self.env.coin.NET, self.env.database_query_timeout,
+            initargs=(path, self.env.coin.NET, self.env.database_query_timeout,
                       self.env.track_metrics)
         )
         if self.env.max_query_workers is not None and self.env.max_query_workers == 0:

@@ -112,7 +112,7 @@ async def search(executor, kwargs):


 async def main(db_path, max_query_time):
-    args = dict(initializer=initializer, initargs=(log, db_path, MainNetLedger, 0.25))
+    args = dict(initializer=initializer, initargs=(db_path, MainNetLedger, 0.25))
     workers = max(os.cpu_count(), 4)
     log.info(f"using {workers} reader processes")
     query_executor = ProcessPoolExecutor(workers, **args)

@@ -45,7 +45,7 @@ async def run_times(executor, iterations, show=True):

 async def main():
     executor = ProcessPoolExecutor(
-        4, initializer=reader.initializer, initargs=(log, db_path, 'mainnet', 1.0, True)
+        4, initializer=reader.initializer, initargs=(db_path, 'mainnet', 1.0, True)
     )
     #await run_times(executor, 4, show=False)
     #await run_times(executor, 1)

@@ -46,7 +46,7 @@ class TestSQLDB(unittest.TestCase):
         db_url = 'file:test_sqldb?mode=memory&cache=shared'
         self.sql = writer.SQLDB(self, db_url)
         self.addCleanup(self.sql.close)
-        reader.initializer(logging.getLogger(__name__), db_url, 'regtest', self.query_timeout)
+        reader.initializer(db_url, 'regtest', self.query_timeout)
         self.addCleanup(reader.cleanup)
         self.timer = Timer('BlockProcessor')
         self.sql.open()
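
For the pool-based call sites above, the same updated signature applies. A minimal sketch of wiring the reader pool after this change, with hypothetical path and timeout values (the import path is assumed):

    from concurrent.futures import ProcessPoolExecutor
    from lbry.wallet.server.db import reader  # assumed module path

    # hypothetical configuration
    db_path = 'file:claims.db?mode=ro'
    query_timeout = 0.25

    query_executor = ProcessPoolExecutor(
        4,
        initializer=reader.initializer,
        # updated initargs: (path, ledger_name, query_timeout, measure) -- no logger
        initargs=(db_path, 'mainnet', query_timeout, False),
    )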