tx parsing speedup and faster wallet sync

Lex Berezhny 2019-07-09 01:00:05 -04:00
parent 8c878f8e25
commit 8c63033461
4 changed files with 29 additions and 17 deletions

View file

@@ -76,7 +76,7 @@ class WalletDatabase(BaseDatabase):
             ))
         }
         for txo in txos:
-            if txo.script.is_claim_name or txo.script.is_update_claim:
+            if txo.is_claim and txo.can_decode_claim:
                 txo.channel = channels.get(txo.claim.signing_channel_id, None)
         return txos
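
The new check asks the output itself whether it is a claim and whether its claim payload actually decodes, instead of probing raw script flags and then decoding unconditionally. A minimal sketch of what such properties could look like on the output class; the script flags and a decoding `claim` property are assumed from the surrounding code, this is not the actual lbry implementation:

class Output(BaseOutput):

    @property
    def is_claim(self) -> bool:
        # claim-ish output: either a fresh claim or an update to an existing one
        return self.script.is_claim_name or self.script.is_update_claim

    @property
    def can_decode_claim(self) -> bool:
        # only True when the claim payload parses, so callers may safely
        # touch self.claim (and self.claim.signing_channel_id) afterwards
        try:
            _ = self.claim  # assumed property that decodes the payload on access
            return True
        except Exception:
            return False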

View file

@@ -7,7 +7,7 @@ from typing import Tuple, List, Union, Callable, Any, Awaitable, Iterable
 import sqlite3
 
-from torba.client.basetransaction import BaseTransaction
+from torba.client.basetransaction import BaseTransaction, TXRefImmutable
 from torba.client.baseaccount import BaseAccount
 
 log = logging.getLogger(__name__)
@@ -470,24 +470,34 @@ class BaseDatabase(SQLiteMixin):
             " JOIN tx USING (txid)".format(cols), **constraints
         ))
 
-    async def get_txos(self, my_account=None, **constraints):
+    async def get_txos(self, my_account=None, no_tx=False, **constraints):
         my_account = my_account or constraints.get('account', None)
         if isinstance(my_account, BaseAccount):
             my_account = my_account.public_key.address
         if 'order_by' not in constraints:
             constraints['order_by'] = ["tx.height=0 DESC", "tx.height DESC", "tx.position DESC"]
         rows = await self.select_txos(
-            "tx.txid, raw, tx.height, tx.position, tx.is_verified, txo.position, chain, account",
+            "tx.txid, raw, tx.height, tx.position, tx.is_verified, "
+            "txo.position, chain, account, amount, script",
             **constraints
         )
         txos = []
         txs = {}
+        output_class = self.ledger.transaction_class.output_class
         for row in rows:
-            if row[0] not in txs:
-                txs[row[0]] = self.ledger.transaction_class(
-                    row[1], height=row[2], position=row[3], is_verified=row[4]
-                )
-            txo = txs[row[0]].outputs[row[5]]
+            if no_tx:
+                txo = output_class(
+                    amount=row[8],
+                    script=output_class.script_class(row[9]),
+                    tx_ref=TXRefImmutable.from_id(row[0], row[2]),
+                    position=row[5]
+                )
+            else:
+                if row[0] not in txs:
+                    txs[row[0]] = self.ledger.transaction_class(
+                        row[1], height=row[2], position=row[3], is_verified=row[4]
+                    )
+                txo = txs[row[0]].outputs[row[5]]
             txo.is_change = row[6] == 1
             txo.is_my_account = row[7] == my_account
             txos.append(txo)
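
Selecting `amount` and `script` alongside the existing columns is what makes the `no_tx` fast path possible: each row becomes a standalone output tied to an immutable transaction reference, without deserializing the full raw transaction that contains it. A rough sketch of the two call styles, assuming `db` is an instance of this class:

async def load_wallet_outputs(db, account):
    # default path: each distinct txid is parsed once into a full Transaction
    # and the txo is taken from its .outputs list
    full = await db.get_txos(account=account)
    # fast path: no Transaction objects are built; each row becomes a bare
    # Output(amount, script) pointing at TXRefImmutable.from_id(txid, height)
    bare = await db.get_txos(account=account, no_tx=True)
    return full, bare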

View file

@@ -9,6 +9,7 @@ from typing import Dict, Type, Iterable, List, Optional
 from operator import itemgetter
 from collections import namedtuple
 
+import pylru
 from torba.tasks import TaskGroup
 from torba.client import baseaccount, basenetwork, basetransaction
 from torba.client.basedatabase import BaseDatabase
@@ -134,7 +135,7 @@ class BaseLedger(metaclass=LedgerRegistry):
             )
         )
 
-        self._tx_cache = {}
+        self._tx_cache = pylru.lrucache(100000)
         self._update_tasks = TaskGroup()
         self._utxo_reservation_lock = asyncio.Lock()
         self._header_processing_lock = asyncio.Lock()
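
Replacing the plain dict with pylru.lrucache(100000) puts an upper bound on the transaction cache: during a long sync the least recently used entries are evicted once 100,000 are held, instead of the cache growing without limit, while call sites keep using ordinary dict-style access. A small standalone illustration of the eviction behaviour:

import pylru

cache = pylru.lrucache(2)          # keep at most two entries
cache['a'] = 1
cache['b'] = 2
_ = cache['a']                     # touch 'a' so it becomes most recently used
cache['c'] = 3                     # evicts 'b', the least recently used entry
print('a' in cache, 'b' in cache)  # True False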
@@ -421,8 +422,8 @@ class BaseLedger(metaclass=LedgerRegistry):
             else:
                 check_db_for_txos.append(txi.txo_ref.id)
 
-        referenced_txos = {
-            txo.id: txo for txo in await self.db.get_txos(txoid__in=check_db_for_txos)
+        referenced_txos = {} if not check_db_for_txos else {
+            txo.id: txo for txo in await self.db.get_txos(txoid__in=check_db_for_txos, no_tx=True)
         }
 
         for txi in tx.inputs:
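
Two savings while resolving the outputs spent by an incoming transaction: the database is skipped entirely when every input was already satisfied from the in-memory cache, and when a query is needed the `no_tx=True` path is used because only the referenced outputs matter here, not their parent transactions. Written out as a plain helper (a generic sketch, not ledger code):

async def fetch_referenced_txos(db, txo_ids):
    # avoid a pointless round trip (and an empty IN () constraint) when
    # everything was already resolved from the cache
    if not txo_ids:
        return {}
    return {txo.id: txo for txo in await db.get_txos(txoid__in=txo_ids, no_tx=True)}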

View file

@@ -310,19 +310,20 @@ class BaseTransaction:
     def outputs(self) -> ReadOnlyList[BaseOutput]:
         return ReadOnlyList(self._outputs)
 
-    def _add(self, new_ios: Iterable[InputOutput], existing_ios: List) -> 'BaseTransaction':
+    def _add(self, existing_ios: List, new_ios: Iterable[InputOutput], reset=False) -> 'BaseTransaction':
         for txio in new_ios:
             txio.tx_ref = self.ref
             txio.position = len(existing_ios)
             existing_ios.append(txio)
-        self._reset()
+        if reset:
+            self._reset()
         return self
 
     def add_inputs(self, inputs: Iterable[BaseInput]) -> 'BaseTransaction':
-        return self._add(inputs, self._inputs)
+        return self._add(self._inputs, inputs, True)
 
     def add_outputs(self, outputs: Iterable[BaseOutput]) -> 'BaseTransaction':
-        return self._add(outputs, self._outputs)
+        return self._add(self._outputs, outputs, True)
 
     @property
     def size(self) -> int:
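
`_add` now takes the target list first and only invalidates derived state when asked to. The public `add_inputs`/`add_outputs` keep the old behaviour by passing `reset=True`, while deserialization (next hunk) appends parsed inputs and outputs without repeatedly discarding the raw bytes it is reading from. A hedged sketch of the sort of work `_reset` stands for, to show the cost of calling it on every append (attribute names assumed, not the actual torba implementation):

    def _reset(self):
        # drop values derived from the current inputs/outputs so they are
        # recomputed lazily on next access: serialized bytes, size, id/hash
        self._raw = None
        self._size = None
        self._id = None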
@@ -420,11 +421,11 @@ class BaseTransaction:
             stream = BCDataStream(self._raw)
             self.version = stream.read_uint32()
             input_count = stream.read_compact_size()
-            self.add_inputs([
+            self._add(self._inputs, [
                 self.input_class.deserialize_from(stream) for _ in range(input_count)
             ])
             output_count = stream.read_compact_size()
-            self.add_outputs([
+            self._add(self._outputs, [
                 self.output_class.deserialize_from(stream) for _ in range(output_count)
             ])
             self.locktime = stream.read_uint32()
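
This is where the parsing speedup lands: deserialization previously went through `add_inputs` and `add_outputs`, each of which called `_reset()`, so derived state was thrown away and rebuilt twice for every transaction parsed during sync. Calling `_add` directly keeps parsing a pure append. A rough way to observe the effect, assuming a list of raw transaction bytes and a ledger's transaction class (illustrative only; deserialization is assumed to run in the constructor when raw bytes are passed):

import time

def parse_all(raw_txs, transaction_class):
    # parse every raw transaction and report how long the batch took
    start = time.perf_counter()
    txs = [transaction_class(raw) for raw in raw_txs]
    print(f"parsed {len(txs)} txs in {time.perf_counter() - start:.3f}s")
    return txs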