lbry-sdk/torba/client/basedatabase.py

import logging
import asyncio
from asyncio import wrap_future
from concurrent.futures.thread import ThreadPoolExecutor
from operator import itemgetter
from typing import Tuple, List, Union, Callable, Any, Awaitable, Iterable

import sqlite3

from torba.client.hash import TXRefImmutable
from torba.client.basetransaction import BaseTransaction
from torba.client.baseaccount import BaseAccount

log = logging.getLogger(__name__)

class AIOSQLite:

    def __init__(self):
        # has to be single threaded as there is no mapping of thread:connection
        self.executor = ThreadPoolExecutor(max_workers=1)
        self.connection: sqlite3.Connection = None

    @classmethod
    async def connect(cls, path: Union[bytes, str], *args, **kwargs):
        db = cls()
        db.connection = await wrap_future(db.executor.submit(sqlite3.connect, path, *args, **kwargs))
        return db

    async def close(self):
        def __close(conn):
            self.executor.submit(conn.close)
            self.executor.shutdown(wait=True)
        conn = self.connection
        self.connection = None
        return asyncio.get_event_loop_policy().get_event_loop().call_later(0.01, __close, conn)

    def executescript(self, script: str) -> Awaitable:
        return wrap_future(self.executor.submit(self.connection.executescript, script))

    def execute_fetchall(self, sql: str, parameters: Iterable = None) -> Awaitable[Iterable[sqlite3.Row]]:
        parameters = parameters if parameters is not None else []
        def __fetchall(conn: sqlite3.Connection, *args, **kwargs):
            return conn.execute(*args, **kwargs).fetchall()
        return wrap_future(self.executor.submit(__fetchall, self.connection, sql, parameters))

    def execute(self, sql: str, parameters: Iterable = None) -> Awaitable[sqlite3.Cursor]:
        parameters = parameters if parameters is not None else []
        return self.run(lambda conn, sql, parameters: conn.execute(sql, parameters), sql, parameters)

    def run(self, fun, *args, **kwargs) -> Awaitable:
        return wrap_future(self.executor.submit(self.__run_transaction, fun, *args, **kwargs))

    def __run_transaction(self, fun: Callable[[sqlite3.Connection, Any, Any], Any], *args, **kwargs):
        self.connection.execute('begin')
        try:
            fun(self.connection, *args, **kwargs)  # type: ignore
        except (Exception, OSError):
            self.connection.rollback()
            raise
        else:
            self.connection.commit()

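# A minimal usage sketch (not part of the original module): how AIOSQLite is
# typically driven from an asyncio event loop. The in-memory path, the `kv`
# table and the function name `_aiosqlite_example` are illustrative assumptions.
async def _aiosqlite_example():
    db = await AIOSQLite.connect(':memory:')
    await db.executescript("create table if not exists kv (k text primary key, v text);")
    # execute() runs inside __run_transaction, so the insert is committed atomically
    await db.execute("insert into kv (k, v) values (?, ?)", ['greeting', 'hello'])
    rows = await db.execute_fetchall("select v from kv where k = ?", ['greeting'])
    assert rows[0][0] == 'hello'
    await db.close()
# To try it: asyncio.get_event_loop().run_until_complete(_aiosqlite_example())
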
def constraints_to_sql(constraints, joiner=' AND ', prepend_key=''):
    sql, values = [], {}
    for key, constraint in constraints.items():
        col, op, key = key, '=', key.replace('.', '_')
        if key.startswith('$'):
            values[key] = constraint
            continue
        elif key.endswith('__not'):
            col, op = col[:-len('__not')], '!='
        elif key.endswith('__lt'):
            col, op = col[:-len('__lt')], '<'
        elif key.endswith('__lte'):
            col, op = col[:-len('__lte')], '<='
        elif key.endswith('__gt'):
            col, op = col[:-len('__gt')], '>'
        elif key.endswith('__like'):
            col, op = col[:-len('__like')], 'LIKE'
        elif key.endswith('__in') or key.endswith('__not_in'):
            if key.endswith('__in'):
                col, op = col[:-len('__in')], 'IN'
            else:
                col, op = col[:-len('__not_in')], 'NOT IN'
            if isinstance(constraint, (list, set)):
                items = ', '.join(
                    "'{}'".format(item) if isinstance(item, str) else str(item)
                    for item in constraint
                )
            elif isinstance(constraint, str):
                items = constraint
            else:
                raise ValueError("{} requires a list, set or string as constraint value.".format(col))
            sql.append('{} {} ({})'.format(col, op, items))
            continue
        elif key.endswith('__any'):
            where, subvalues = constraints_to_sql(constraint, ' OR ', key+'_')
            sql.append('({})'.format(where))
            values.update(subvalues)
            continue
        sql.append('{} {} :{}'.format(col, op, prepend_key+key))
        values[prepend_key+key] = constraint
    return joiner.join(sql) if sql else '', values

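# Worked example (illustrative, not from the original source): the constraint key
# suffixes map to SQL operators; `__in`/`__not_in` values are inlined rather than
# bound as named parameters. Assuming insertion-ordered dicts (Python 3.7+):
#   constraints_to_sql({'height__gt': 100, 'account__in': ['abc']})
#   -> ("height > :height__gt AND account IN ('abc')", {'height__gt': 100})
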
def query(select, **constraints):
    sql = [select]
    limit = constraints.pop('limit', None)
    offset = constraints.pop('offset', None)
    order_by = constraints.pop('order_by', None)
    constraints.pop('my_account', None)
    account = constraints.pop('account', None)
    if account is not None:
        if not isinstance(account, list):
            account = [account]
        constraints['account__in'] = [
            (a.public_key.address if isinstance(a, BaseAccount) else a) for a in account
        ]
    where, values = constraints_to_sql(constraints)
    if where:
        sql.append('WHERE')
        sql.append(where)
    if order_by is not None:
        sql.append('ORDER BY')
        if isinstance(order_by, str):
            sql.append(order_by)
        elif isinstance(order_by, list):
            sql.append(', '.join(order_by))
        else:
            raise ValueError("order_by must be string or list")
    if limit is not None:
        sql.append('LIMIT {}'.format(limit))
    if offset is not None:
        sql.append('OFFSET {}'.format(offset))
    return ' '.join(sql), values

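# Worked example (illustrative): query() composes a SELECT plus WHERE / ORDER BY /
# LIMIT / OFFSET clauses from keyword constraints; the column and table are assumptions.
#   query("SELECT txid FROM tx", height__gt=100, order_by="height", limit=10)
#   -> ("SELECT txid FROM tx WHERE height > :height__gt ORDER BY height LIMIT 10",
#       {'height__gt': 100})
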
def rows_to_dict(rows, fields):
    if rows:
        return [dict(zip(fields, r)) for r in rows]
    else:
        return []

class SQLiteMixin:

    CREATE_TABLES_QUERY: str

    def __init__(self, path):
        self._db_path = path
        self.db: AIOSQLite = None
        self.ledger = None

    async def open(self):
        log.info("connecting to database: %s", self._db_path)
        self.db = await AIOSQLite.connect(self._db_path)
        await self.db.executescript(self.CREATE_TABLES_QUERY)

    async def close(self):
        await self.db.close()

    @staticmethod
    def _insert_sql(table: str, data: dict) -> Tuple[str, List]:
        columns, values = [], []
        for column, value in data.items():
            columns.append(column)
            values.append(value)
        sql = "INSERT INTO {} ({}) VALUES ({})".format(
            table, ', '.join(columns), ', '.join(['?'] * len(values))
        )
        return sql, values

    @staticmethod
    def _update_sql(table: str, data: dict, where: str,
                    constraints: Union[list, tuple]) -> Tuple[str, list]:
        columns, values = [], []
        for column, value in data.items():
            columns.append("{} = ?".format(column))
            values.append(value)
        values.extend(constraints)
        sql = "UPDATE {} SET {} WHERE {}".format(
            table, ', '.join(columns), where
        )
        return sql, values

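# Worked example (illustrative; table and column names are assumptions):
#   SQLiteMixin._insert_sql('tx', {'txid': 'abc', 'height': 5})
#   -> ("INSERT INTO tx (txid, height) VALUES (?, ?)", ['abc', 5])
#   SQLiteMixin._update_sql('tx', {'height': 6}, 'txid = ?', ('abc',))
#   -> ("UPDATE tx SET height = ? WHERE txid = ?", [6, 'abc'])
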
class BaseDatabase(SQLiteMixin):

    PRAGMAS = """
        pragma journal_mode=WAL;
    """

    CREATE_PUBKEY_ADDRESS_TABLE = """
        create table if not exists pubkey_address (
            address text primary key,
            account text not null,
            chain integer not null,
            position integer not null,
            pubkey blob not null,
            history text,
            used_times integer not null default 0
        );
    """
    CREATE_PUBKEY_ADDRESS_INDEX = """
        create index if not exists pubkey_address_account_idx on pubkey_address (account);
    """

    CREATE_TX_TABLE = """
        create table if not exists tx (
            txid text primary key,
            raw blob not null,
            height integer not null,
            position integer not null,
            is_verified boolean not null default 0
        );
    """

    CREATE_TXO_TABLE = """
        create table if not exists txo (
            txid text references tx,
            txoid text primary key,
            address text references pubkey_address,
            position integer not null,
            amount integer not null,
            script blob not null,
            is_reserved boolean not null default 0
        );
    """
    CREATE_TXO_INDEX = """
        create index if not exists txo_address_idx on txo (address);
    """

    CREATE_TXI_TABLE = """
        create table if not exists txi (
            txid text references tx,
            txoid text references txo,
            address text references pubkey_address
        );
    """
    CREATE_TXI_INDEX = """
        create index if not exists txi_address_idx on txi (address);
        create index if not exists txi_txoid_idx on txi (txoid);
    """

    CREATE_TABLES_QUERY = (
        PRAGMAS +
        CREATE_TX_TABLE +
        CREATE_PUBKEY_ADDRESS_TABLE +
        CREATE_PUBKEY_ADDRESS_INDEX +
        CREATE_TXO_TABLE +
        CREATE_TXO_INDEX +
        CREATE_TXI_TABLE +
        CREATE_TXI_INDEX
    )

    @staticmethod
    def txo_to_row(tx, address, txo):
        return {
            'txid': tx.id,
            'txoid': txo.id,
            'address': address,
            'position': txo.position,
            'amount': txo.amount,
            'script': sqlite3.Binary(txo.script.source)
        }

    def save_transaction_io(self, save_tx, tx: BaseTransaction, address, txhash, history):

        def _transaction(conn: sqlite3.Connection, save_tx, tx: BaseTransaction, address, txhash, history):
            if save_tx == 'insert':
                conn.execute(*self._insert_sql('tx', {
                    'txid': tx.id,
                    'raw': sqlite3.Binary(tx.raw),
                    'height': tx.height,
                    'position': tx.position,
                    'is_verified': tx.is_verified
                }))
            elif save_tx == 'update':
                conn.execute(*self._update_sql("tx", {
                    'height': tx.height, 'position': tx.position, 'is_verified': tx.is_verified
                }, 'txid = ?', (tx.id,)))

            existing_txos = set(map(itemgetter(0), conn.execute(*query(
                "SELECT position FROM txo", txid=tx.id
            ))))
            for txo in tx.outputs:
                if txo.position in existing_txos:
                    continue
                if txo.script.is_pay_pubkey_hash and txo.script.values['pubkey_hash'] == txhash:
                    conn.execute(*self._insert_sql("txo", self.txo_to_row(tx, address, txo)))
                elif txo.script.is_pay_script_hash:
                    # TODO: implement script hash payments
                    log.warning('Database.save_transaction_io: pay script hash is not implemented!')

            # look up the address associated with each TXI (via its TXO)
            txoid_to_address = {r[0]: r[1] for r in conn.execute(*query(
                "SELECT txoid, address FROM txo", txoid__in=[txi.txo_ref.id for txi in tx.inputs]
            ))}
            # TXIs that have already been added
            existing_txis = {r[0] for r in conn.execute(*query(
                "SELECT txoid FROM txi", txid=tx.id
            ))}
            for txi in tx.inputs:
                txoid = txi.txo_ref.id
                new_txi = txoid not in existing_txis
                address_matches = txoid_to_address.get(txoid) == address
                if new_txi and address_matches:
                    conn.execute(*self._insert_sql("txi", {
                        'txid': tx.id,
                        'txoid': txoid,
                        'address': address,
                    }))

            conn.execute(
                "UPDATE pubkey_address SET history = ?, used_times = ? WHERE address = ?",
                (history, history.count(':')//2, address)
            )

        return self.db.run(_transaction, save_tx, tx, address, txhash, history)

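    # Note (descriptive comment, not in the original): _transaction is handed to
    # AIOSQLite.run(), so it executes on the single worker thread inside the
    # BEGIN/COMMIT wrapper of __run_transaction; the tx, txo, txi and
    # pubkey_address writes above therefore land atomically.
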
    async def reserve_outputs(self, txos, is_reserved=True):
        txoids = [txo.id for txo in txos]
        await self.db.execute(
            "UPDATE txo SET is_reserved = ? WHERE txoid IN ({})".format(
                ', '.join(['?']*len(txoids))
            ), [is_reserved]+txoids
        )

    async def release_outputs(self, txos):
        await self.reserve_outputs(txos, is_reserved=False)

    async def rewind_blockchain(self, above_height):  # pylint: disable=no-self-use
        # TODO:
        # 1. delete transactions above_height
        # 2. update address histories removing deleted TXs
        return True

    async def select_transactions(self, cols, account=None, **constraints):
        if 'txid' not in constraints and account is not None:
            constraints['$account'] = account.public_key.address
            constraints['txid__in'] = """
                SELECT txo.txid FROM txo
                JOIN pubkey_address USING (address) WHERE pubkey_address.account = :$account
              UNION
                SELECT txi.txid FROM txi
                JOIN pubkey_address USING (address) WHERE pubkey_address.account = :$account
            """
        return await self.db.execute_fetchall(
            *query("SELECT {} FROM tx".format(cols), **constraints)
        )

    async def get_transactions(self, my_account=None, **constraints):
        my_account = my_account or constraints.get('account', None)

        tx_rows = await self.select_transactions(
            'txid, raw, height, position, is_verified',
            order_by=["height DESC", "position DESC"],
            **constraints
        )
        if not tx_rows:
            return []

        txids, txs = [], []
        for row in tx_rows:
            txids.append(row[0])
            txs.append(self.ledger.transaction_class(
                raw=row[1], height=row[2], position=row[3], is_verified=bool(row[4])
            ))

        annotated_txos = {
            txo.id: txo for txo in
            (await self.get_txos(
                my_account=my_account,
                txid__in=txids
            ))
        }

        referenced_txos = {
            txo.id: txo for txo in
            (await self.get_txos(
                my_account=my_account,
                txoid__in=query("SELECT txoid FROM txi", **{'txid__in': txids})[0]
            ))
        }

        for tx in txs:
            for txi in tx.inputs:
                txo = referenced_txos.get(txi.txo_ref.id)
                if txo:
                    txi.txo_ref = txo.ref
            for txo in tx.outputs:
                _txo = annotated_txos.get(txo.id)
                if _txo:
                    txo.update_annotations(_txo)
                else:
                    txo.update_annotations(None)

        return txs

    async def get_transaction_count(self, **constraints):
        constraints.pop('offset', None)
        constraints.pop('limit', None)
        constraints.pop('order_by', None)
        count = await self.select_transactions('count(*)', **constraints)
        return count[0][0]

    async def get_transaction(self, **constraints):
        txs = await self.get_transactions(limit=1, **constraints)
        if txs:
            return txs[0]

    async def select_txos(self, cols, **constraints):
        return await self.db.execute_fetchall(*query(
            "SELECT {} FROM txo"
            " JOIN pubkey_address USING (address)"
            " JOIN tx USING (txid)".format(cols), **constraints
        ))

    async def get_txos(self, my_account=None, **constraints):
        my_account = my_account or constraints.get('account', None)
        if isinstance(my_account, BaseAccount):
            my_account = my_account.public_key.address
        rows = await self.select_txos(
            "amount, script, txid, tx.height, txo.position, chain, account", **constraints
        )
        output_class = self.ledger.transaction_class.output_class
        return [
            output_class(
                amount=row[0],
                script=output_class.script_class(row[1]),
                tx_ref=TXRefImmutable.from_id(row[2], row[3]),
                position=row[4],
                is_change=row[5] == 1,
                is_my_account=row[6] == my_account
            ) for row in rows
        ]

    async def get_txo_count(self, **constraints):
        constraints.pop('offset', None)
        constraints.pop('limit', None)
        constraints.pop('order_by', None)
        count = await self.select_txos('count(*)', **constraints)
        return count[0][0]

    @staticmethod
    def constrain_utxo(constraints):
        constraints['is_reserved'] = False
        constraints['txoid__not_in'] = "SELECT txoid FROM txi"

    def get_utxos(self, **constraints):
        self.constrain_utxo(constraints)
        return self.get_txos(**constraints)

    def get_utxo_count(self, **constraints):
        self.constrain_utxo(constraints)
        return self.get_txo_count(**constraints)

    async def get_balance(self, **constraints):
        self.constrain_utxo(constraints)
        balance = await self.select_txos('SUM(amount)', **constraints)
        return balance[0][0] or 0

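    # Illustrative note (not in the original): constrain_utxo() narrows any TXO
    # query to spendable outputs, so with no extra constraints get_balance()
    # effectively executes SQL of the form (layout abbreviated here):
    #   SELECT SUM(amount) FROM txo
    #     JOIN pubkey_address USING (address) JOIN tx USING (txid)
    #     WHERE is_reserved = :is_reserved AND txoid NOT IN (SELECT txoid FROM txi)
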
    async def select_addresses(self, cols, **constraints):
        return await self.db.execute_fetchall(*query(
            "SELECT {} FROM pubkey_address".format(cols), **constraints
        ))

    async def get_addresses(self, cols=('address', 'account', 'chain', 'position', 'used_times'),
                            **constraints):
        addresses = await self.select_addresses(', '.join(cols), **constraints)
        return rows_to_dict(addresses, cols)

    async def get_address_count(self, **constraints):
        count = await self.select_addresses('count(*)', **constraints)
        return count[0][0]

    async def get_address(self, **constraints):
        addresses = await self.get_addresses(
            cols=('address', 'account', 'chain', 'position', 'pubkey', 'history', 'used_times'),
            limit=1, **constraints
        )
        if addresses:
            return addresses[0]

    async def add_keys(self, account, chain, keys):
        sql = (
            "insert into pubkey_address "
            "(address, account, chain, position, pubkey) "
            "values "
        ) + ', '.join(['(?, ?, ?, ?, ?)'] * len(keys))
        values = []
        for position, pubkey in keys:
            values.extend((
                pubkey.address, account.public_key.address, chain, position,
                sqlite3.Binary(pubkey.pubkey_bytes)
            ))
        await self.db.execute(sql, values)

    async def _set_address_history(self, address, history):
        await self.db.execute(
            "UPDATE pubkey_address SET history = ?, used_times = ? WHERE address = ?",
            (history, history.count(':')//2, address)
        )

    async def set_address_history(self, address, history):
        await self._set_address_history(address, history)