import logging
import asyncio
from asyncio import wrap_future
from concurrent.futures.thread import ThreadPoolExecutor
from typing import Tuple, List, Union, Callable, Any, Awaitable, Iterable, Optional

import sqlite3

from torba.client.basetransaction import BaseTransaction
from torba.client.baseaccount import BaseAccount

log = logging.getLogger(__name__)


class AIOSQLite:

    def __init__(self):
        # must stay single-threaded: sqlite3 connections can only be used from
        # the thread that created them and there is no thread-to-connection mapping
        self.executor = ThreadPoolExecutor(max_workers=1)
        self.connection: Optional[sqlite3.Connection] = None

    @classmethod
    async def connect(cls, path: Union[bytes, str], *args, **kwargs):
        db = cls()
        db.connection = await wrap_future(db.executor.submit(sqlite3.connect, path, *args, **kwargs))
        return db
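
    # A minimal usage sketch (illustrative only; the table and values below are
    # hypothetical, not part of this module):
    #
    #     db = await AIOSQLite.connect(':memory:')
    #     await db.execute("create table kv (k text primary key, v text)")
    #     await db.execute("insert into kv values (?, ?)", ['a', '1'])
    #     rows = await db.execute_fetchall("select v from kv where k = ?", ['a'])
    #     await db.close()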

    async def close(self):
        def __close(conn):
            self.executor.submit(conn.close)
            self.executor.shutdown(wait=True)
        conn = self.connection
        self.connection = None
        # schedule the close shortly after returning so that queries already
        # queued on the executor get a chance to run first
        return asyncio.get_event_loop_policy().get_event_loop().call_later(0.01, __close, conn)

    def executemany(self, sql: str, params: Iterable):
        def __executemany_in_a_transaction(conn: sqlite3.Connection, *args, **kwargs):
            return conn.executemany(*args, **kwargs)
        return self.run(__executemany_in_a_transaction, sql, params)

    def executescript(self, script: str) -> Awaitable:
        return wrap_future(self.executor.submit(self.connection.executescript, script))

    def execute_fetchall(self, sql: str, parameters: Iterable = None) -> Awaitable[Iterable[sqlite3.Row]]:
        parameters = parameters if parameters is not None else []

        def __fetchall(conn: sqlite3.Connection, *args, **kwargs):
            return conn.execute(*args, **kwargs).fetchall()
        return wrap_future(self.executor.submit(__fetchall, self.connection, sql, parameters))

    def execute(self, sql: str, parameters: Iterable = None) -> Awaitable[sqlite3.Cursor]:
        parameters = parameters if parameters is not None else []
        return self.run(lambda conn, sql, parameters: conn.execute(sql, parameters), sql, parameters)

    def run(self, fun, *args, **kwargs) -> Awaitable:
        return wrap_future(self.executor.submit(self.__run_transaction, fun, *args, **kwargs))

    def __run_transaction(self, fun: Callable[[sqlite3.Connection, Any, Any], Any], *args, **kwargs):
        self.connection.execute('begin')
        try:
            result = fun(self.connection, *args, **kwargs)  # type: ignore
            self.connection.commit()
            return result
        except (Exception, OSError):
            # roll back whatever the callable managed to do, then re-raise
            self.connection.rollback()
            raise
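
    # Sketch of a multi-statement transaction run atomically through `run`
    # (the `accounts` table and the values are hypothetical):
    #
    #     def transfer(conn: sqlite3.Connection, src: str, dst: str, amount: int):
    #         conn.execute("UPDATE accounts SET balance = balance - ? WHERE id = ?", (amount, src))
    #         conn.execute("UPDATE accounts SET balance = balance + ? WHERE id = ?", (amount, dst))
    #
    #     await db.run(transfer, 'alice', 'bob', 100)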

    def run_with_foreign_keys_disabled(self, fun, *args, **kwargs) -> Awaitable:
        return wrap_future(
            self.executor.submit(self.__run_transaction_with_foreign_keys_disabled, fun, *args, **kwargs)
        )

    def __run_transaction_with_foreign_keys_disabled(self,
                                                     fun: Callable[[sqlite3.Connection, Any, Any], Any],
                                                     *args, **kwargs):
        foreign_keys_enabled, = self.connection.execute("pragma foreign_keys").fetchone()
        if not foreign_keys_enabled:
            raise sqlite3.IntegrityError("foreign keys are disabled, use `AIOSQLite.run` instead")
        try:
            self.connection.execute('pragma foreign_keys=off')
            return self.__run_transaction(fun, *args, **kwargs)
        finally:
            self.connection.execute('pragma foreign_keys=on')
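
    # Intended for maintenance work that must violate referential integrity
    # mid-transaction; the pragma is restored even if the callable raises.
    # A sketch (the pruning itself is hypothetical):
    #
    #     def prune(conn: sqlite3.Connection, height: int):
    #         conn.execute("DELETE FROM tx WHERE height > ?", (height,))
    #
    #     await db.run_with_foreign_keys_disabled(prune, 500)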


def constraints_to_sql(constraints, joiner=' AND ', prepend_key=''):
    sql, values = [], {}
    for key, constraint in constraints.items():
        tag = '0'
        if '#' in key:
            key, tag = key[:key.index('#')], key[key.index('#')+1:]
        col, op, key = key, '=', key.replace('.', '_')
        if key.startswith('$'):
            values[key] = constraint
            continue
        elif key.endswith('__not'):
            col, op = col[:-len('__not')], '!='
        elif key.endswith('__is_null'):
            col = col[:-len('__is_null')]
            sql.append(f'{col} IS NULL')
            continue
        elif key.endswith('__is_not_null'):
            col = col[:-len('__is_not_null')]
            sql.append(f'{col} IS NOT NULL')
            continue
        elif key.endswith('__lt'):
            col, op = col[:-len('__lt')], '<'
        elif key.endswith('__lte'):
            col, op = col[:-len('__lte')], '<='
        elif key.endswith('__gt'):
            col, op = col[:-len('__gt')], '>'
        elif key.endswith('__gte'):
            col, op = col[:-len('__gte')], '>='
        elif key.endswith('__like'):
            col, op = col[:-len('__like')], 'LIKE'
        elif key.endswith('__not_like'):
            col, op = col[:-len('__not_like')], 'NOT LIKE'
        elif key.endswith('__in') or key.endswith('__not_in'):
            if key.endswith('__in'):
                col, op = col[:-len('__in')], 'IN'
            else:
                col, op = col[:-len('__not_in')], 'NOT IN'
            if constraint:
                if isinstance(constraint, (list, set, tuple)):
                    keys = []
                    for i, val in enumerate(constraint):
                        keys.append(f':{key}{tag}_{i}')
                        values[f'{key}{tag}_{i}'] = val
                    sql.append(f'{col} {op} ({", ".join(keys)})')
                elif isinstance(constraint, str):
                    sql.append(f'{col} {op} ({constraint})')
                else:
                    raise ValueError(f"{col} requires a list, set or string as constraint value.")
            continue
        elif key.endswith('__any') or key.endswith('__or'):
            where, subvalues = constraints_to_sql(constraint, ' OR ', key+tag+'_')
            sql.append(f'({where})')
            values.update(subvalues)
            continue
        elif key.endswith('__and'):
            where, subvalues = constraints_to_sql(constraint, ' AND ', key+tag+'_')
            sql.append(f'({where})')
            values.update(subvalues)
            continue
        sql.append(f'{col} {op} :{prepend_key}{key}{tag}')
        values[prepend_key+key+tag] = constraint
    return joiner.join(sql) if sql else '', values
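
# A sketch of how the constraint mini-language renders (values illustrative):
#
#     constraints_to_sql({'height__gte': 100, 'txid__in': ['a', 'b']})
#     # -> ('height >= :height__gte0 AND txid IN (:txid__in0_0, :txid__in0_1)',
#     #     {'height__gte0': 100, 'txid__in0_0': 'a', 'txid__in0_1': 'b'})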


def query(select, **constraints):
    sql = [select]
    limit = constraints.pop('limit', None)
    offset = constraints.pop('offset', None)
    order_by = constraints.pop('order_by', None)

    constraints.pop('my_account', None)
    account = constraints.pop('account', None)
    if account is not None:
        if not isinstance(account, list):
            account = [account]
        constraints['account__in'] = [
            (a.public_key.address if isinstance(a, BaseAccount) else a) for a in account
        ]

    where, values = constraints_to_sql(constraints)
    if where:
        sql.append('WHERE')
        sql.append(where)

    if order_by:
        sql.append('ORDER BY')
        if isinstance(order_by, str):
            sql.append(order_by)
        elif isinstance(order_by, list):
            sql.append(', '.join(order_by))
        else:
            raise ValueError("order_by must be string or list")

    if limit is not None:
        sql.append('LIMIT {}'.format(limit))

    if offset is not None:
        sql.append('OFFSET {}'.format(offset))

    return ' '.join(sql), values
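
# For example (illustrative):
#
#     query("SELECT txid FROM tx", height__gt=0, order_by='height', limit=10)
#     # -> ('SELECT txid FROM tx WHERE height > :height__gt0 ORDER BY height LIMIT 10',
#     #     {'height__gt0': 0})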


def rows_to_dict(rows, fields):
    if rows:
        return [dict(zip(fields, r)) for r in rows]
    else:
        return []
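
# e.g. rows_to_dict([(1, 'a')], ('id', 'name')) -> [{'id': 1, 'name': 'a'}]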


class SQLiteMixin:

    CREATE_TABLES_QUERY: str

    def __init__(self, path):
        self._db_path = path
        self.db: Optional[AIOSQLite] = None
        self.ledger = None
|
2018-06-11 15:33:32 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def open(self):
|
2018-06-11 15:33:32 +02:00
|
|
|
log.info("connecting to database: %s", self._db_path)
|
2018-10-18 22:19:47 +02:00
|
|
|
self.db = await AIOSQLite.connect(self._db_path)
|
2018-10-15 04:16:51 +02:00
|
|
|
await self.db.executescript(self.CREATE_TABLES_QUERY)
|
2018-06-11 15:33:32 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def close(self):
|
|
|
|
await self.db.close()

    @staticmethod
    def _insert_sql(table: str, data: dict, ignore_duplicate: bool = False) -> Tuple[str, List]:
        columns, values = [], []
        for column, value in data.items():
            columns.append(column)
            values.append(value)
        or_ignore = ""
        if ignore_duplicate:
            or_ignore = " OR IGNORE"
        sql = "INSERT{} INTO {} ({}) VALUES ({})".format(
            or_ignore, table, ', '.join(columns), ', '.join(['?'] * len(values))
        )
        return sql, values
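
    # For example (illustrative):
    #
    #     _insert_sql('tx', {'txid': 'abc', 'height': 5}, ignore_duplicate=True)
    #     # -> ('INSERT OR IGNORE INTO tx (txid, height) VALUES (?, ?)', ['abc', 5])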

    @staticmethod
    def _update_sql(table: str, data: dict, where: str,
                    constraints: Union[list, tuple]) -> Tuple[str, list]:
        columns, values = [], []
        for column, value in data.items():
            columns.append("{} = ?".format(column))
            values.append(value)
        values.extend(constraints)
        sql = "UPDATE {} SET {} WHERE {}".format(
            table, ', '.join(columns), where
        )
        return sql, values
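
    # For example (illustrative):
    #
    #     _update_sql('tx', {'height': 6}, 'txid = ?', ('abc',))
    #     # -> ('UPDATE tx SET height = ? WHERE txid = ?', [6, 'abc'])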


class BaseDatabase(SQLiteMixin):

    PRAGMAS = """
        pragma journal_mode=WAL;
    """

    CREATE_PUBKEY_ADDRESS_TABLE = """
        create table if not exists pubkey_address (
            address text primary key,
            account text not null,
            chain integer not null,
            position integer not null,
            pubkey blob not null,
            history text,
            used_times integer not null default 0
        );
    """
    CREATE_PUBKEY_ADDRESS_INDEX = """
        create index if not exists pubkey_address_account_idx on pubkey_address (account);
    """
|
2018-06-08 05:47:46 +02:00
|
|
|
|
2018-07-14 23:47:51 +02:00
|
|
|
CREATE_TX_TABLE = """
|
|
|
|
create table if not exists tx (
|
|
|
|
txid text primary key,
|
|
|
|
raw blob not null,
|
|
|
|
height integer not null,
|
2018-09-26 00:02:50 +02:00
|
|
|
position integer not null,
|
2018-07-14 23:47:51 +02:00
|
|
|
is_verified boolean not null default 0
|
|
|
|
);
|
|
|
|
"""

    CREATE_TXO_TABLE = """
        create table if not exists txo (
            txid text references tx,
            txoid text primary key,
            address text references pubkey_address,
            position integer not null,
            amount integer not null,
            script blob not null,
            is_reserved boolean not null default 0
        );
    """
    CREATE_TXO_INDEX = """
        create index if not exists txo_address_idx on txo (address);
    """
|
2018-06-08 05:47:46 +02:00
|
|
|
|
|
|
|
CREATE_TXI_TABLE = """
|
|
|
|
create table if not exists txi (
|
2018-07-14 23:47:51 +02:00
|
|
|
txid text references tx,
|
|
|
|
txoid text references txo,
|
|
|
|
address text references pubkey_address
|
2018-06-08 05:47:46 +02:00
|
|
|
);
|
|
|
|
"""
|
2018-10-10 04:52:43 +02:00
|
|
|
CREATE_TXI_INDEX = """
|
|
|
|
create index if not exists txi_address_idx on txi (address);
|
2018-10-10 05:39:00 +02:00
|
|
|
create index if not exists txi_txoid_idx on txi (txoid);
|
2018-10-10 04:52:43 +02:00
|
|
|
"""
|
2018-06-08 05:47:46 +02:00
|
|
|
|
|
|
|
CREATE_TABLES_QUERY = (
|
2018-10-10 22:23:50 +02:00
|
|
|
PRAGMAS +
|
2018-06-08 05:47:46 +02:00
|
|
|
CREATE_TX_TABLE +
|
2018-06-11 15:33:32 +02:00
|
|
|
CREATE_PUBKEY_ADDRESS_TABLE +
|
2018-10-10 04:52:43 +02:00
|
|
|
CREATE_PUBKEY_ADDRESS_INDEX +
|
2018-06-08 05:47:46 +02:00
|
|
|
CREATE_TXO_TABLE +
|
2018-10-10 04:52:43 +02:00
|
|
|
CREATE_TXO_INDEX +
|
|
|
|
CREATE_TXI_TABLE +
|
|
|
|
CREATE_TXI_INDEX
|
2018-06-08 05:47:46 +02:00
|
|
|
)
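
    # Schema in brief: `tx` holds raw transactions; `txo` and `txi` record the
    # outputs and spent inputs touching tracked addresses, each referencing
    # `tx` by txid; `pubkey_address` maps derived addresses to their account,
    # chain, derivation position, public key and address history.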

    @staticmethod
    def txo_to_row(tx, address, txo):
        return {
            'txid': tx.id,
            'txoid': txo.id,
            'address': address,
            'position': txo.position,
            'amount': txo.amount,
            'script': sqlite3.Binary(txo.script.source)
        }

    async def insert_transaction(self, tx):
        await self.db.execute(*self._insert_sql('tx', {
            'txid': tx.id,
            'raw': sqlite3.Binary(tx.raw),
            'height': tx.height,
            'position': tx.position,
            'is_verified': tx.is_verified
        }))

    async def update_transaction(self, tx):
        await self.db.execute(*self._update_sql("tx", {
            'height': tx.height, 'position': tx.position, 'is_verified': tx.is_verified
        }, 'txid = ?', (tx.id,)))

    def save_transaction_io(self, tx: BaseTransaction, address, txhash, history):

        def _transaction(conn: sqlite3.Connection, tx: BaseTransaction, address, txhash, history):

            for txo in tx.outputs:
                if txo.script.is_pay_pubkey_hash and txo.script.values['pubkey_hash'] == txhash:
                    conn.execute(*self._insert_sql(
                        "txo", self.txo_to_row(tx, address, txo), ignore_duplicate=True
                    ))
                elif txo.script.is_pay_script_hash:
                    # TODO: implement script hash payments
                    log.warning('Database.save_transaction_io: pay script hash is not implemented!')

            for txi in tx.inputs:
                if txi.txo_ref.txo is not None:
                    txo = txi.txo_ref.txo
                    if txo.get_address(self.ledger) == address:
                        conn.execute(*self._insert_sql("txi", {
                            'txid': tx.id,
                            'txoid': txo.id,
                            'address': address,
                        }, ignore_duplicate=True))

            conn.execute(
                "UPDATE pubkey_address SET history = ?, used_times = ? WHERE address = ?",
                (history, history.count(':')//2, address)
            )

        return self.db.run(_transaction, tx, address, txhash, history)
|
2018-06-08 05:47:46 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def reserve_outputs(self, txos, is_reserved=True):
|
2019-01-29 03:36:16 +01:00
|
|
|
txoids = ((is_reserved, txo.id) for txo in txos)
|
|
|
|
await self.db.executemany("UPDATE txo SET is_reserved = ? WHERE txoid = ?", txoids)
|
2018-06-26 23:22:05 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def release_outputs(self, txos):
|
|
|
|
await self.reserve_outputs(txos, is_reserved=False)
|
2018-06-26 23:22:05 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def rewind_blockchain(self, above_height): # pylint: disable=no-self-use
|
2018-08-17 03:41:22 +02:00
|
|
|
# TODO:
|
|
|
|
# 1. delete transactions above_height
|
|
|
|
# 2. update address histories removing deleted TXs
|
2018-10-15 04:16:51 +02:00
|
|
|
return True
|
2018-08-17 03:41:22 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def select_transactions(self, cols, account=None, **constraints):
|
2018-10-05 01:27:39 +02:00
|
|
|
if 'txid' not in constraints and account is not None:
|
2018-10-07 20:53:44 +02:00
|
|
|
constraints['$account'] = account.public_key.address
|
2018-10-05 01:27:39 +02:00
|
|
|
constraints['txid__in'] = """
|
|
|
|
SELECT txo.txid FROM txo
|
2018-10-07 20:53:44 +02:00
|
|
|
JOIN pubkey_address USING (address) WHERE pubkey_address.account = :$account
|
2018-10-05 01:27:39 +02:00
|
|
|
UNION
|
|
|
|
SELECT txi.txid FROM txi
|
2018-10-07 20:53:44 +02:00
|
|
|
JOIN pubkey_address USING (address) WHERE pubkey_address.account = :$account
|
2018-10-03 13:08:02 +02:00
|
|
|
"""
|
2018-10-15 04:16:51 +02:00
|
|
|
return await self.db.execute_fetchall(
|
|
|
|
*query("SELECT {} FROM tx".format(cols), **constraints)
|
|
|
|
)
|
2018-10-03 13:08:02 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def get_transactions(self, my_account=None, **constraints):
|
2018-10-07 20:53:44 +02:00
|
|
|
my_account = my_account or constraints.get('account', None)
|
2018-10-05 01:27:39 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
tx_rows = await self.select_transactions(
|
2018-10-07 20:53:44 +02:00
|
|
|
'txid, raw, height, position, is_verified',
|
2018-11-20 00:04:07 +01:00
|
|
|
order_by=["height=0 DESC", "height DESC", "position DESC"],
|
2018-10-07 20:53:44 +02:00
|
|
|
**constraints
|
2018-09-21 15:47:31 +02:00
|
|
|
)
|
2018-10-05 01:27:39 +02:00
|
|
|
|
2018-10-10 05:57:41 +02:00
|
|
|
if not tx_rows:
|
|
|
|
return []
|
|
|
|
|
2019-04-07 00:08:33 +02:00
|
|
|
txids, txs, txi_txoids = [], [], []
|
2018-09-22 04:26:07 +02:00
|
|
|
for row in tx_rows:
|
|
|
|
txids.append(row[0])
|
2018-10-03 13:08:02 +02:00
|
|
|
txs.append(self.ledger.transaction_class(
|
2018-10-09 04:44:30 +02:00
|
|
|
raw=row[1], height=row[2], position=row[3], is_verified=bool(row[4])
|
2018-09-26 00:02:50 +02:00
|
|
|
))
|
2019-04-07 00:08:33 +02:00
|
|
|
for txi in txs[-1].inputs:
|
|
|
|
txi_txoids.append(txi.txo_ref.id)
|
2018-09-22 04:18:30 +02:00
|
|
|
|
2018-10-05 01:27:39 +02:00
|
|
|
annotated_txos = {
|
|
|
|
txo.id: txo for txo in
|
2018-10-15 04:16:51 +02:00
|
|
|
(await self.get_txos(
|
2018-10-05 01:27:39 +02:00
|
|
|
my_account=my_account,
|
|
|
|
txid__in=txids
|
|
|
|
))
|
|
|
|
}
|
2018-09-26 00:02:50 +02:00
|
|
|
|
2018-10-05 01:27:39 +02:00
|
|
|
referenced_txos = {
|
|
|
|
txo.id: txo for txo in
|
2018-10-15 04:16:51 +02:00
|
|
|
(await self.get_txos(
|
2018-10-05 01:27:39 +02:00
|
|
|
my_account=my_account,
|
2019-04-07 00:08:33 +02:00
|
|
|
txoid__in=txi_txoids
|
2018-10-05 01:27:39 +02:00
|
|
|
))
|
|
|
|
}
|
2018-09-22 04:18:30 +02:00
|
|
|
|
|
|
|
for tx in txs:
|
|
|
|
for txi in tx.inputs:
|
2018-10-05 01:27:39 +02:00
|
|
|
txo = referenced_txos.get(txi.txo_ref.id)
|
|
|
|
if txo:
|
|
|
|
txi.txo_ref = txo.ref
|
2018-09-22 04:18:30 +02:00
|
|
|
for txo in tx.outputs:
|
2018-10-05 01:27:39 +02:00
|
|
|
_txo = annotated_txos.get(txo.id)
|
|
|
|
if _txo:
|
|
|
|
txo.update_annotations(_txo)
|
2018-10-05 01:42:29 +02:00
|
|
|
else:
|
|
|
|
txo.update_annotations(None)
|
2018-09-22 04:18:30 +02:00
|
|
|
|
|
|
|
return txs
|
2018-09-21 15:47:31 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def get_transaction_count(self, **constraints):
|
2018-10-08 16:37:52 +02:00
|
|
|
constraints.pop('offset', None)
|
|
|
|
constraints.pop('limit', None)
|
|
|
|
constraints.pop('order_by', None)
|
2018-10-15 04:16:51 +02:00
|
|
|
count = await self.select_transactions('count(*)', **constraints)
|
2018-10-07 20:53:44 +02:00
|
|
|
return count[0][0]

    async def get_transaction(self, **constraints):
        txs = await self.get_transactions(limit=1, **constraints)
        if txs:
            return txs[0]

    async def select_txos(self, cols, **constraints):
        return await self.db.execute_fetchall(*query(
            "SELECT {} FROM txo"
            " JOIN pubkey_address USING (address)"
            " JOIN tx USING (txid)".format(cols), **constraints
        ))

    async def get_txos(self, my_account=None, **constraints):
        my_account = my_account or constraints.get('account', None)
        if isinstance(my_account, BaseAccount):
            my_account = my_account.public_key.address
        if 'order_by' not in constraints:
            constraints['order_by'] = ["tx.height=0 DESC", "tx.height DESC", "tx.position DESC"]
        rows = await self.select_txos(
            "tx.txid, raw, tx.height, tx.position, tx.is_verified, txo.position, chain, account",
            **constraints
        )
        txos = []
        txs = {}
        for row in rows:
            if row[0] not in txs:
                txs[row[0]] = self.ledger.transaction_class(
                    row[1], height=row[2], position=row[3], is_verified=row[4]
                )
            txo = txs[row[0]].outputs[row[5]]
            txo.is_change = row[6] == 1
            txo.is_my_account = row[7] == my_account
            txos.append(txo)
        return txos
|
2018-10-03 13:08:02 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def get_txo_count(self, **constraints):
|
2018-10-08 16:37:52 +02:00
|
|
|
constraints.pop('offset', None)
|
|
|
|
constraints.pop('limit', None)
|
|
|
|
constraints.pop('order_by', None)
|
2018-10-15 04:16:51 +02:00
|
|
|
count = await self.select_txos('count(*)', **constraints)
|
2018-10-07 20:53:44 +02:00
|
|
|
return count[0][0]

    @staticmethod
    def constrain_utxo(constraints):
        constraints['is_reserved'] = False
        constraints['txoid__not_in'] = "SELECT txoid FROM txi"
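
    # A txo counts as unspent when it is not reserved and no txi row spends it;
    # the string constraint above renders as `txoid NOT IN (SELECT txoid FROM txi)`.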

    def get_utxos(self, **constraints):
        self.constrain_utxo(constraints)
        return self.get_txos(**constraints)

    def get_utxo_count(self, **constraints):
        self.constrain_utxo(constraints)
        return self.get_txo_count(**constraints)

    async def get_balance(self, **constraints):
        self.constrain_utxo(constraints)
        balance = await self.select_txos('SUM(amount)', **constraints)
        return balance[0][0] or 0

    async def select_addresses(self, cols, **constraints):
        return await self.db.execute_fetchall(*query(
            "SELECT {} FROM pubkey_address".format(cols), **constraints
        ))

    async def get_addresses(self, cols=('address', 'account', 'chain', 'position', 'used_times'),
                            **constraints):
        addresses = await self.select_addresses(', '.join(cols), **constraints)
        return rows_to_dict(addresses, cols)

    async def get_address_count(self, **constraints):
        count = await self.select_addresses('count(*)', **constraints)
        return count[0][0]

    async def get_address(self, **constraints):
        addresses = await self.get_addresses(
            cols=('address', 'account', 'chain', 'position', 'pubkey', 'history', 'used_times'),
            limit=1, **constraints
        )
        if addresses:
            return addresses[0]

    async def add_keys(self, account, chain, keys):
        await self.db.executemany(
            "insert into pubkey_address (address, account, chain, position, pubkey) values (?, ?, ?, ?, ?)",
            ((pubkey.address, account.public_key.address, chain,
              position, sqlite3.Binary(pubkey.pubkey_bytes))
             for position, pubkey in keys)
        )
|
2018-06-11 15:33:32 +02:00
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def _set_address_history(self, address, history):
|
|
|
|
await self.db.execute(
|
|
|
|
"UPDATE pubkey_address SET history = ?, used_times = ? WHERE address = ?",
|
2018-07-15 03:34:07 +02:00
|
|
|
(history, history.count(':')//2, address)
|
2018-06-27 00:31:42 +02:00
|
|
|
)
|
|
|
|
|
2018-10-15 04:16:51 +02:00
|
|
|
async def set_address_history(self, address, history):
|
|
|
|
await self._set_address_history(address, history)
|