import logging
from typing import Tuple, List, Set, Iterator, Optional

from sqlalchemy import func
from sqlalchemy.future import select

from lbry.crypto.hash import hash160
from lbry.crypto.bip32 import PubKey

from ..utils import query
from ..query_context import context
from ..tables import TXO, PubkeyAddress, AccountAddress
from .filters import (
    get_filter_matchers, get_filter_matchers_at_granularity, has_filter_range,
    get_tx_matchers_for_missing_txs,
)

log = logging.getLogger(__name__)


class DatabaseAddressIterator:
    """Iterate the addresses already stored for (account_id, chain) in
    derivation order, yielding (hash160(pubkey), n, is_new) tuples;
    ``self.n`` tracks the highest derivation index seen so far."""

    def __init__(self, account_id, chain):
        self.account_id = account_id
        self.chain = chain
        self.n = -1

    def __iter__(self) -> Iterator[Tuple[bytes, int, bool]]:
        with context().connect_streaming() as c:
            sql = (
                select(
                    AccountAddress.c.pubkey,
                    AccountAddress.c.n
                ).where(
                    (AccountAddress.c.account == self.account_id) &
                    (AccountAddress.c.chain == self.chain)
                ).order_by(AccountAddress.c.n)
            )
            for row in c.execute(sql):
                self.n = row['n']
                yield hash160(row['pubkey']), self.n, False
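
# Usage sketch (account id and chain values are hypothetical; by BIP44
# convention chain 0 is receiving and chain 1 is change):
#
#     for address_hash, n, is_new in DatabaseAddressIterator(1, 0):
#         ...  # is_new is always False here: only stored rows are read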


class PersistingAddressIterator(DatabaseAddressIterator):
    """Extends DatabaseAddressIterator: once the stored addresses are
    exhausted, keeps deriving new child keys, buffering them so they can be
    persisted in batches via add_keys()."""

    def __init__(self, account_id, chain, pubkey_bytes, chain_code, depth):
        super().__init__(account_id, chain)
        self.pubkey_bytes = pubkey_bytes
        self.chain_code = chain_code
        self.depth = depth
        self.pubkey_buffer = []

    def flush(self):
        if self.pubkey_buffer:
            add_keys([{
                'account': self.account_id,
                'address': k.address,
                'chain': self.chain,
                'pubkey': k.pubkey_bytes,
                'chain_code': k.chain_code,
                'n': k.n,
                'depth': k.depth
            } for k in self.pubkey_buffer])
            self.pubkey_buffer.clear()

    def __enter__(self) -> 'PersistingAddressIterator':
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.flush()

    def __iter__(self) -> Iterator[Tuple[bytes, int, bool]]:
        yield from super().__iter__()
        pubkey = PubKey(context().ledger, self.pubkey_bytes, self.chain_code, 0, self.depth)
        while True:  # infinite generator: the consumer decides when to stop
            self.n += 1
            pubkey_child = pubkey.child(self.n)
            self.pubkey_buffer.append(pubkey_child)
            if len(self.pubkey_buffer) >= 900:  # persist in batches, not per key
                self.flush()
            yield hash160(pubkey_child.pubkey_bytes), self.n, True
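
# Used as a context manager so that any keys still buffered when iteration
# stops are persisted; a minimal sketch, assuming the constructor's five
# arguments are available (values hypothetical):
#
#     with PersistingAddressIterator(1, 0, pubkey_bytes, chain_code, depth) as it:
#         for address_hash, n, is_new in it:
#             if n >= 20:
#                 break  # __exit__ flushes the remaining buffer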


def generate_addresses_using_filters(best_height, allowed_gap, address_manager) -> Set:
    """Derive addresses, matching each against the block address filters,
    until ``allowed_gap`` consecutive addresses match nothing; return the
    filter ranges that matched but are not available locally."""
    need, have = set(), set()
    matchers = get_filter_matchers(best_height)
    with PersistingAddressIterator(*address_manager) as addresses:
        gap = 0
        for address_hash, n, is_new in addresses:  # pylint: disable=unused-variable
            gap += 1
            address_bytes = bytearray(address_hash)
            for matcher, filter_range in matchers:
                if matcher.Match(address_bytes):
                    gap = 0
                    if filter_range not in need and filter_range not in have:
                        if has_filter_range(*filter_range):
                            have.add(filter_range)
                        else:
                            need.add(filter_range)
            if gap >= allowed_gap:
                break
    return need
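
# `allowed_gap` is the usual HD-wallet gap limit: derivation stops once that
# many consecutive addresses match no filter. `address_manager` is unpacked
# into PersistingAddressIterator, so it is expected to be the 5-tuple
# (account_id, chain, pubkey_bytes, chain_code, depth).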


def get_missing_sub_filters_for_addresses(granularity, address_manager):
    need = set()
    for matcher, filter_range in get_filter_matchers_at_granularity(granularity):
        for address_hash, _, _ in DatabaseAddressIterator(*address_manager):
            address_bytes = bytearray(address_hash)
            if matcher.Match(address_bytes) and not has_filter_range(*filter_range):
                need.add(filter_range)
                break
    return need


def get_missing_tx_for_addresses(address_manager):
    need = set()
    for tx_hash, matcher in get_tx_matchers_for_missing_txs():
        for address_hash, _, _ in DatabaseAddressIterator(*address_manager):
            address_bytes = bytearray(address_hash)
            if matcher.Match(address_bytes):
                need.add(tx_hash)
                break
    return need


def update_address_used_times(addresses):
    context().execute(
        PubkeyAddress.update()
        .values(used_times=(
            select(func.count(TXO.c.address))
            .where(TXO.c.address == PubkeyAddress.c.address)
            .scalar_subquery()
        ))
        .where(PubkeyAddress.c.address.in_(addresses))
    )
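
# The new used_times value is a correlated scalar subquery: for every address
# being updated, it re-counts the TXOs paying to that address.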


def select_addresses(cols, **constraints):
    return context().fetchall(query(
        [AccountAddress, PubkeyAddress],
        select(*cols).select_from(PubkeyAddress.join(AccountAddress)),
        **constraints
    ))


def get_addresses(cols=None, include_total=False, **constraints) -> Tuple[List[dict], Optional[int]]:
    if cols is None:
        cols = (
            PubkeyAddress.c.address,
            PubkeyAddress.c.used_times,
            AccountAddress.c.account,
            AccountAddress.c.chain,
            AccountAddress.c.pubkey,
            AccountAddress.c.chain_code,
            AccountAddress.c.n,
            AccountAddress.c.depth
        )
    return (
        select_addresses(cols, **constraints),
        get_address_count(**constraints) if include_total else None
    )
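
# Example call, assuming `query()` accepts column-name keyword constraints
# (hypothetical values):
#
#     rows, total = get_addresses(account=1, include_total=True)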


def get_address_count(**constraints):
    count = select_addresses([func.count().label("total")], **constraints)
    return count[0]["total"] or 0


def get_all_addresses():
    return [r["address"] for r in context().fetchall(select(PubkeyAddress.c.address))]


def add_keys(pubkeys):
    c = context()
    # Each row binds len(pubkeys[0]) variables, so the batch size is capped at
    # (total bound-variable limit for one query) // (variables per row).
    current_limit = c.variable_limit // len(pubkeys[0])
    for start in range(0, len(pubkeys), current_limit - 1):
        batch = pubkeys[start:(start + current_limit - 1)]
        c.execute(c.insert_or_ignore(PubkeyAddress).values([{'address': k['address']} for k in batch]))
        c.execute(c.insert_or_ignore(AccountAddress).values(batch))
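
# Sketch of the expected input shape, mirroring the dicts built by
# PersistingAddressIterator.flush() (field values hypothetical):
#
#     add_keys([{
#         'account': 1, 'address': address, 'chain': 0,
#         'pubkey': pubkey_bytes, 'chain_code': chain_code, 'n': 0, 'depth': 2
#     }])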