import os
import logging
import asyncio
import sqlite3
import platform
from binascii import hexlify
from dataclasses import dataclass
from contextvars import ContextVar
from concurrent.futures.thread import ThreadPoolExecutor
from concurrent.futures.process import ProcessPoolExecutor
from typing import Tuple, List, Union, Callable, Any, Awaitable, Iterable, Dict, Optional


log = logging.getLogger(__name__)
sqlite3.enable_callback_tracebacks(True)


@dataclass
class ReaderProcessState:
    cursor: sqlite3.Cursor


reader_context: ContextVar[ReaderProcessState] = ContextVar('reader_context')
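

# Each reader worker (thread or process) opens its own connection in
# `initializer` and parks it in `reader_context`, so the module-level
# `run_read_only_*` helpers always operate on the cursor belonging to
# whichever worker the executor scheduled them on.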
def initializer(path):
    db = sqlite3.connect(path)
    # dict_row_factory is defined at the bottom of this module; the name is
    # resolved when the worker runs, so read queries return rows as dicts
    db.row_factory = dict_row_factory
    db.executescript("pragma journal_mode=WAL;")
    reader = ReaderProcessState(db.cursor())
    reader_context.set(reader)


def run_read_only_fetchall(sql, params):
    cursor = reader_context.get().cursor
    try:
        return cursor.execute(sql, params).fetchall()
    except Exception as e:
        log.exception('Error running read-only query', exc_info=e)
        raise


def run_read_only_fetchone(sql, params):
    cursor = reader_context.get().cursor
    try:
        return cursor.execute(sql, params).fetchone()
    except Exception as e:
        log.exception('Error running read-only query', exc_info=e)
        raise
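

# Readers fall back to threads on Windows and Android, where worker
# processes are not a safe default; everywhere else a process pool keeps
# read queries off the main interpreter's GIL.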
if platform.system() == 'Windows' or 'ANDROID_ARGUMENT' in os.environ:
    ReaderExecutorClass = ThreadPoolExecutor
else:
    ReaderExecutorClass = ProcessPoolExecutor


class AIOSQLite:
    reader_executor: ReaderExecutorClass

    def __init__(self):
        # writes must be single threaded: there is one writer connection and
        # no mapping of thread to connection
        self.writer_executor = ThreadPoolExecutor(max_workers=1)
        self.writer_connection: Optional[sqlite3.Connection] = None
        self._closing = False
        self.query_count = 0
        self.write_lock = asyncio.Lock()
        self.writers = 0
        self.read_ready = asyncio.Event()

    @classmethod
    async def connect(cls, path: Union[bytes, str], *args, **kwargs):
        sqlite3.enable_callback_tracebacks(True)
        db = cls()

        def _connect_writer():
            db.writer_connection = sqlite3.connect(path, *args, **kwargs)

        # os.cpu_count() may return None; fall back to the two-reader minimum
        readers = max((os.cpu_count() or 2) - 2, 2)
        db.reader_executor = ReaderExecutorClass(
            max_workers=readers, initializer=initializer, initargs=(path,)
        )
        await asyncio.get_event_loop().run_in_executor(db.writer_executor, _connect_writer)
        db.read_ready.set()
        return db

    async def close(self):
        if self._closing:
            return
        self._closing = True
        await asyncio.get_event_loop().run_in_executor(self.writer_executor, self.writer_connection.close)
        self.writer_executor.shutdown(wait=True)
        self.reader_executor.shutdown(wait=True)
        self.read_ready.clear()
        self.writer_connection = None
    def executemany(self, sql: str, params: Iterable):
        params = params if params is not None else []
        # this fetchall is needed to prevent SQLITE_MISUSE
        return self.run(lambda conn: conn.executemany(sql, params).fetchall())

    def executescript(self, script: str) -> Awaitable:
        return self.run(lambda conn: conn.executescript(script))

    async def _execute_fetch(self, sql: str, parameters: Iterable = None,
                             read_only=False, fetch_all: bool = False) -> Union[dict, List[dict]]:
        read_only_fn = run_read_only_fetchall if fetch_all else run_read_only_fetchone
        parameters = parameters if parameters is not None else []
        if read_only:
            # let every pending write drain before handing the query to a reader
            while self.writers:
                await self.read_ready.wait()
            return await asyncio.get_event_loop().run_in_executor(
                self.reader_executor, read_only_fn, sql, parameters
            )
        if fetch_all:
            return await self.run(lambda conn: conn.execute(sql, parameters).fetchall())
        return await self.run(lambda conn: conn.execute(sql, parameters).fetchone())

    async def execute_fetchall(self, sql: str, parameters: Iterable = None,
                               read_only=False) -> List[dict]:
        return await self._execute_fetch(sql, parameters, read_only, fetch_all=True)

    async def execute_fetchone(self, sql: str, parameters: Iterable = None,
                               read_only=False) -> dict:
        return await self._execute_fetch(sql, parameters, read_only, fetch_all=False)

    def execute(self, sql: str, parameters: Iterable = None) -> Awaitable[sqlite3.Cursor]:
        parameters = parameters if parameters is not None else []
        return self.run(lambda conn: conn.execute(sql, parameters))

    async def run(self, fun, *args, **kwargs):
        # announce a pending write so new read-only queries hold off, then
        # serialize the write through the single-threaded writer executor
        self.writers += 1
        self.read_ready.clear()
        async with self.write_lock:
            try:
                return await asyncio.get_event_loop().run_in_executor(
                    self.writer_executor, lambda: self.__run_transaction(fun, *args, **kwargs)
                )
            finally:
                self.writers -= 1
                if not self.writers:
                    self.read_ready.set()

    def __run_transaction(self, fun: Callable[[sqlite3.Connection, Any, Any], Any], *args, **kwargs):
        self.writer_connection.execute('begin')
        try:
            self.query_count += 1
            result = fun(self.writer_connection, *args, **kwargs)  # type: ignore
            self.writer_connection.commit()
            return result
        except Exception as e:
            log.exception('Error running transaction:', exc_info=e)
            self.writer_connection.rollback()
            log.warning("rolled back")
            raise

    def run_with_foreign_keys_disabled(self, fun, *args, **kwargs) -> Awaitable:
        return asyncio.get_event_loop().run_in_executor(
            self.writer_executor, self.__run_transaction_with_foreign_keys_disabled, fun, args, kwargs
        )

    def __run_transaction_with_foreign_keys_disabled(self,
                                                     fun: Callable[[sqlite3.Connection, Any, Any], Any],
                                                     args, kwargs):
        foreign_keys_enabled, = self.writer_connection.execute("pragma foreign_keys").fetchone()
        if not foreign_keys_enabled:
            raise sqlite3.IntegrityError("foreign keys are disabled, use `AIOSQLite.run` instead")
        try:
            self.writer_connection.execute('pragma foreign_keys=off').fetchone()
            return self.__run_transaction(fun, *args, **kwargs)
        finally:
            self.writer_connection.execute('pragma foreign_keys=on').fetchone()
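
    # Usage sketch (illustrative table and file names; `isolation_level=None`
    # mirrors how SQLiteMixin.open connects below):
    #
    #   async def main():
    #       db = await AIOSQLite.connect('example.db', isolation_level=None)
    #       await db.execute("CREATE TABLE IF NOT EXISTS kv (k TEXT, v TEXT)")
    #       await db.execute("INSERT INTO kv VALUES (?, ?)", ('a', '1'))
    #       rows = await db.execute_fetchall(
    #           "SELECT v FROM kv WHERE k = ?", ('a',), read_only=True
    #       )
    #       await db.close()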


def constraints_to_sql(constraints, joiner=' AND ', prepend_key=''):
    sql, values = [], {}
    for key, constraint in constraints.items():
        tag = '0'
        if '#' in key:
            key, tag = key[:key.index('#')], key[key.index('#')+1:]
        col, op, key = key, '=', key.replace('.', '_')
        if not key:
            sql.append(constraint)
            continue
        if key.startswith('$$'):
            col, key = col[2:], key[1:]
        elif key.startswith('$'):
            values[key] = constraint
            continue
        if key.endswith('__not'):
            col, op = col[:-len('__not')], '!='
        elif key.endswith('__is_null'):
            col = col[:-len('__is_null')]
            sql.append(f'{col} IS NULL')
            continue
        if key.endswith('__is_not_null'):
            col = col[:-len('__is_not_null')]
            sql.append(f'{col} IS NOT NULL')
            continue
        if key.endswith('__lt'):
            col, op = col[:-len('__lt')], '<'
        elif key.endswith('__lte'):
            col, op = col[:-len('__lte')], '<='
        elif key.endswith('__gt'):
            col, op = col[:-len('__gt')], '>'
        elif key.endswith('__gte'):
            col, op = col[:-len('__gte')], '>='
        elif key.endswith('__like'):
            col, op = col[:-len('__like')], 'LIKE'
        elif key.endswith('__not_like'):
            col, op = col[:-len('__not_like')], 'NOT LIKE'
        elif key.endswith('__in') or key.endswith('__not_in'):
            if key.endswith('__in'):
                col, op, one_val_op = col[:-len('__in')], 'IN', '='
            else:
                col, op, one_val_op = col[:-len('__not_in')], 'NOT IN', '!='
            if constraint:
                if isinstance(constraint, (list, set, tuple)):
                    if len(constraint) == 1:
                        values[f'{key}{tag}'] = next(iter(constraint))
                        sql.append(f'{col} {one_val_op} :{key}{tag}')
                    else:
                        keys = []
                        for i, val in enumerate(constraint):
                            keys.append(f':{key}{tag}_{i}')
                            values[f'{key}{tag}_{i}'] = val
                        sql.append(f'{col} {op} ({", ".join(keys)})')
                elif isinstance(constraint, str):
                    sql.append(f'{col} {op} ({constraint})')
                else:
                    raise ValueError(f"{col} requires a list, set or string as constraint value.")
            continue
        elif key.endswith('__any') or key.endswith('__or'):
            where, subvalues = constraints_to_sql(constraint, ' OR ', key+tag+'_')
            sql.append(f'({where})')
            values.update(subvalues)
            continue
        if key.endswith('__and'):
            where, subvalues = constraints_to_sql(constraint, ' AND ', key+tag+'_')
            sql.append(f'({where})')
            values.update(subvalues)
            continue
        sql.append(f'{col} {op} :{prepend_key}{key}{tag}')
        values[prepend_key+key+tag] = constraint
    return joiner.join(sql) if sql else '', values
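
# Worked example:
#   constraints_to_sql({'height__gte': 100, 'txid__in': ['a', 'b']})
# returns
#   ('height >= :height__gte0 AND txid IN (:txid__in0_0, :txid__in0_1)',
#    {'height__gte0': 100, 'txid__in0_0': 'a', 'txid__in0_1': 'b'})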


def query(select, **constraints) -> Tuple[str, Dict[str, Any]]:
    sql = [select]
    limit = constraints.pop('limit', None)
    offset = constraints.pop('offset', None)
    order_by = constraints.pop('order_by', None)
    group_by = constraints.pop('group_by', None)

    accounts = constraints.pop('accounts', [])
    if accounts:
        constraints['account__in'] = [a.public_key.address for a in accounts]

    where, values = constraints_to_sql(constraints)
    if where:
        sql.append('WHERE')
        sql.append(where)

    if group_by is not None:
        sql.append(f'GROUP BY {group_by}')

    if order_by:
        sql.append('ORDER BY')
        if isinstance(order_by, str):
            sql.append(order_by)
        elif isinstance(order_by, list):
            sql.append(', '.join(order_by))
        else:
            raise ValueError("order_by must be string or list")

    if limit is not None:
        sql.append(f'LIMIT {limit}')

    if offset is not None:
        sql.append(f'OFFSET {offset}')

    return ' '.join(sql), values
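
# Worked example:
#   query("SELECT * FROM tx", height__lte=500, order_by='height', limit=10)
# returns
#   ('SELECT * FROM tx WHERE height <= :height__lte0 ORDER BY height LIMIT 10',
#    {'height__lte0': 500})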


def interpolate(sql, values):
    # substitute longer keys first (reverse sort) so ':key1' never clobbers
    # ':key10'; the naive quoting makes this suitable for producing a readable
    # query, not for executing untrusted input
    for k in sorted(values.keys(), reverse=True):
        value = values[k]
        if isinstance(value, bytes):
            value = f"X'{hexlify(value).decode()}'"
        elif isinstance(value, str):
            value = f"'{value}'"
        else:
            value = str(value)
        sql = sql.replace(f":{k}", value)
    return sql
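
# Worked example:
#   interpolate('SELECT * FROM tx WHERE height <= :height__lte0', {'height__lte0': 500})
# returns
#   'SELECT * FROM tx WHERE height <= 500'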


def constrain_single_or_list(constraints, column, value, convert=lambda x: x):
    if value is not None:
        if isinstance(value, list):
            value = [convert(v) for v in value]
            if len(value) == 1:
                constraints[column] = value[0]
            elif len(value) > 1:
                constraints[f"{column}__in"] = value
        else:
            constraints[column] = convert(value)
    return constraints
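
# Worked example:
#   constrain_single_or_list({}, 'height', [100])   -> {'height': 100}
#   constrain_single_or_list({}, 'height', [1, 2])  -> {'height__in': [1, 2]}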


class SQLiteMixin:

    SCHEMA_VERSION: Optional[str] = None
    CREATE_TABLES_QUERY: str
    MAX_QUERY_VARIABLES = 900

    CREATE_VERSION_TABLE = """
        create table if not exists version (
            version text
        );
    """

    def __init__(self, path):
        self._db_path = path
        self.db: Optional[AIOSQLite] = None
        self.ledger = None

    async def open(self):
        log.info("connecting to database: %s", self._db_path)
        self.db = await AIOSQLite.connect(self._db_path, isolation_level=None)
        if self.SCHEMA_VERSION:
            tables = [t[0] for t in await self.db.execute_fetchall(
                "SELECT name FROM sqlite_master WHERE type='table';"
            )]
            if tables:
                if 'version' in tables:
                    version = await self.db.execute_fetchone("SELECT version FROM version LIMIT 1;")
                    if version == (self.SCHEMA_VERSION,):
                        return
                # schema version mismatch: drop everything and recreate from scratch
                await self.db.executescript('\n'.join(
                    f"DROP TABLE {table};" for table in tables
                ))
            await self.db.execute(self.CREATE_VERSION_TABLE)
            await self.db.execute("INSERT INTO version VALUES (?)", (self.SCHEMA_VERSION,))
        await self.db.executescript(self.CREATE_TABLES_QUERY)

    async def close(self):
        await self.db.close()
    @staticmethod
    def _insert_sql(table: str, data: dict, ignore_duplicate: bool = False,
                    replace: bool = False) -> Tuple[str, List]:
        columns, values = [], []
        for column, value in data.items():
            columns.append(column)
            values.append(value)
        policy = ""
        if ignore_duplicate:
            policy = " OR IGNORE"
        if replace:
            policy = " OR REPLACE"
        sql = "INSERT{} INTO {} ({}) VALUES ({})".format(
            policy, table, ', '.join(columns), ', '.join(['?'] * len(values))
        )
        return sql, values
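
    # Worked example:
    #   SQLiteMixin._insert_sql('tx', {'txid': 'abc', 'height': 5})
    # returns
    #   ('INSERT INTO tx (txid, height) VALUES (?, ?)', ['abc', 5])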

    @staticmethod
    def _update_sql(table: str, data: dict, where: str,
                    constraints: Union[list, tuple]) -> Tuple[str, list]:
        columns, values = [], []
        for column, value in data.items():
            columns.append(f"{column} = ?")
            values.append(value)
        values.extend(constraints)
        sql = "UPDATE {} SET {} WHERE {}".format(
            table, ', '.join(columns), where
        )
        return sql, values
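
    # Worked example:
    #   SQLiteMixin._update_sql('tx', {'height': 6}, 'txid = ?', ['abc'])
    # returns
    #   ('UPDATE tx SET height = ? WHERE txid = ?', [6, 'abc'])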


def dict_row_factory(cursor, row):
    d = {}
    for idx, col in enumerate(cursor.description):
        d[col[0]] = row[idx]
    return d
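
# Worked example: with `conn.row_factory = dict_row_factory`,
#   conn.execute('SELECT 1 AS one').fetchone()  ->  {'one': 1}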