# lbry-sdk/lbry/service/base.py

import os
import asyncio
import logging
from typing import List, Optional, Tuple, NamedTuple, Dict
from lbry.db import Database, Result
from lbry.db.constants import TXO_TYPES
from lbry.schema.result import Censor
from lbry.blockchain.transaction import Transaction, Output
from lbry.blockchain.ledger import Ledger
from lbry.wallet import WalletManager
from lbry.event import EventController, EventStream
log = logging.getLogger(__name__)


class BlockEvent(NamedTuple):
    height: int


class Sync:
"""
Maintains local state in sync with some upstream source of truth.
Client stays synced with wallet server
Server stays synced with lbrycrd
"""
on_block: EventStream
on_mempool: EventStream
def __init__(self, ledger: Ledger, db: Database):
self.ledger = ledger
self.conf = ledger.conf
self.db = db
self._on_progress_controller = db._on_progress_controller
self.on_progress = db.on_progress
self._on_ready_controller = EventController()
self.on_ready = self._on_ready_controller.stream
def on_bulk_started(self):
return self.on_progress.where() # filter for bulk started event
def on_bulk_finished(self):
return self.on_progress.where() # filter for bulk finished event
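
    # A hedged illustration of the intended filtering above, assuming progress
    # events are dicts that carry an "event" name; the payload shape and event
    # name below are assumptions, not part of this base class:
    #
    #     return self.on_progress.where(lambda e: e.get("event") == "sync.start")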
async def start(self):
raise NotImplementedError
async def stop(self):
raise NotImplementedError
async def get_block_headers(self, start_height: int, end_height: int = None):
raise NotImplementedError
async def get_best_block_height(self) -> int:
raise NotImplementedError
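
# A minimal sketch of what a Sync subclass might look like. It is purely
# illustrative and not part of the SDK: the polling loop, the 30 second
# interval, and the way the block event controller is wired up are assumptions
# made only to make the start()/stop()/event contract above concrete.
#
#     class PollingSync(Sync):
#
#         def __init__(self, ledger: Ledger, db: Database):
#             super().__init__(ledger, db)
#             self._on_block_controller = EventController()
#             self.on_block = self._on_block_controller.stream
#             self._task: Optional[asyncio.Task] = None
#
#         async def start(self):
#             self._task = asyncio.create_task(self._poll())
#
#         async def stop(self):
#             if self._task is not None:
#                 self._task.cancel()
#
#         async def get_best_block_height(self) -> int:
#             ...  # query the upstream source (lbrycrd or a full node)
#
#         async def _poll(self):
#             while True:
#                 height = await self.get_best_block_height()
#                 await self._on_block_controller.add(BlockEvent(height))
#                 await asyncio.sleep(30)
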
class Service:
"""
Base class for light client and full node LBRY service implementations.
2020-09-11 20:08:06 +02:00
This is the programmatic api (as compared to API)
2020-05-01 15:33:58 +02:00
"""
sync: Sync
def __init__(self, ledger: Ledger):
self.ledger, self.conf = ledger, ledger.conf
self.db = Database(ledger)
self.wallets = WalletManager(self.db)
        # fired once sync has established a connection to a source it can synchronize from:
        # for the full node this is lbrycrd (or the sync service), for the light client a full node
        # (usage illustrated in the sketch below)
self._on_connected_controller = EventController()
self.on_connected = self._on_connected_controller.stream
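
        # A hedged sketch (not part of the SDK) of how the on_connected event
        # is meant to be used, assuming the EventController.add() and
        # EventStream.first API used elsewhere in the SDK:
        #
        #     # inside a concrete subclass, once the upstream source is reachable:
        #     await self._on_connected_controller.add(True)
        #
        #     # a caller that wants to block until the service is online:
        #     await service.on_connected.first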
async def start(self):
await self.db.open()
await self.wallets.storage.prepare()
await self.wallets.initialize()
await self.sync.start()
async def stop(self):
await self.sync.stop()
await self.db.close()
async def get_status(self):
pass
def get_version(self):
pass
async def find_ffmpeg(self):
pass
async def get_file(self, uri, **kwargs):
pass
def create_wallet(self, wallet_id):
return self.wallets.create(wallet_id)
async def get_addresses(self, **constraints):
return await self.db.get_addresses(**constraints)
    async def get_address_filters(self, start_height: int, end_height: int = None, granularity: int = 0):
raise NotImplementedError
def reserve_outputs(self, txos):
return self.db.reserve_outputs(txos)
def release_outputs(self, txos):
return self.db.release_outputs(txos)
def release_tx(self, tx):
return self.release_outputs([txi.txo_ref.txo for txi in tx.inputs])
def get_utxos(self, **constraints):
self.constraint_spending_utxos(constraints)
return self.db.get_utxos(**constraints)
async def get_txos(self, resolve=False, **constraints) -> Result[Output]:
txos = await self.db.get_txos(**constraints)
if resolve:
return await self._resolve_for_local_results(constraints.get('accounts', []), txos)
return txos
def get_txo_sum(self, **constraints):
return self.db.get_txo_sum(**constraints)
def get_txo_plot(self, **constraints):
return self.db.get_txo_plot(**constraints)
def get_transactions(self, **constraints):
return self.db.get_transactions(**constraints)
async def get_transaction(self, tx_hash: bytes):
tx = await self.db.get_transaction(tx_hash=tx_hash)
if tx:
return tx
# try:
# raw, merkle = await self.ledger.network.get_transaction_and_merkle(tx_hash)
# except CodeMessageError as e:
# if 'No such mempool or blockchain transaction.' in e.message:
# return {'success': False, 'code': 404, 'message': 'transaction not found'}
# return {'success': False, 'code': e.code, 'message': e.message}
# height = merkle.get('block_height')
# tx = Transaction(unhexlify(raw), height=height)
# if height and height > 0:
# await self.ledger.maybe_verify_transaction(tx, height, merkle)
# return tx
async def search_transactions(self, txids):
raise NotImplementedError
async def sum_supports(self, claim_hash: bytes, include_channel_content=False, exclude_own_supports=False) \
-> Tuple[List[Dict], int]:
raise NotImplementedError
async def announce_addresses(self, address_manager, addresses: List[str]):
await self.ledger.announce_addresses(address_manager, addresses)
async def get_address_manager_for_address(self, address):
details = await self.db.get_address(address=address)
for wallet in self.wallets:
for account in wallet.accounts:
if account.id == details['account']:
return account.address_managers[details['chain']]
return None
async def reset(self):
self.ledger.conf = {
'auto_connect': True,
'default_servers': self.conf.lbryum_servers,
'data_path': self.conf.wallet_dir,
}
await self.ledger.stop()
await self.ledger.start()
async def get_best_blockhash(self):
if len(self.ledger.headers) <= 0:
return self.ledger.genesis_hash
return (await self.ledger.headers.hash(self.ledger.headers.height)).decode()
async def maybe_broadcast_or_release(self, tx, preview=False, no_wait=False):
if preview:
return await self.release_tx(tx)
try:
await self.broadcast(tx)
if not no_wait:
await self.wait(tx)
except Exception:
await self.release_tx(tx)
raise
async def broadcast(self, tx):
raise NotImplementedError
async def wait(self, tx: Transaction, height=-1, timeout=1):
raise NotImplementedError
async def resolve(self, urls, **kwargs):
raise NotImplementedError
async def search_claims(self, accounts, **kwargs) -> Tuple[List[Output], Optional[int], Censor]:
raise NotImplementedError
async def search_supports(self, accounts, **kwargs) -> Tuple[List[Output], Optional[int]]:
raise NotImplementedError
async def get_claim_by_claim_id(self, accounts, claim_id, **kwargs) -> Output:
for claim in (await self.search_claims(accounts, claim_id=claim_id, **kwargs))[0]:
return claim
@staticmethod
def constraint_spending_utxos(constraints):
constraints['txo_type__in'] = (0, TXO_TYPES['purchase'])
async def get_purchases(self, wallet, resolve=False, **constraints):
purchases = await wallet.get_purchases(**constraints)
if resolve:
claim_ids = [p.purchased_claim_id for p in purchases]
try:
resolved, _, _ = await self.search_claims([], claim_ids=claim_ids)
except Exception as err:
if isinstance(err, asyncio.CancelledError): # TODO: remove when updated to 3.8
raise
log.exception("Resolve failed while looking up purchased claim ids:")
resolved = []
lookup = {claim.claim_id: claim for claim in resolved}
for purchase in purchases:
purchase.purchased_claim = lookup.get(purchase.purchased_claim_id)
return purchases
async def _resolve_for_local_results(self, accounts, txos: Result) -> Result:
results = []
response = await self.resolve(
[txo.permanent_url for txo in txos if txo.can_decode_claim], accounts=accounts
)
for txo in txos:
resolved = response.get(txo.permanent_url) if txo.can_decode_claim else None
if isinstance(resolved, Output):
resolved.update_annotations(txo)
results.append(resolved)
else:
if isinstance(resolved, dict) and 'error' in resolved:
txo.meta['error'] = resolved['error']
results.append(txo)
txos.rows = results
return txos
async def resolve_collection(self, collection, offset=0, page_size=1):
claim_ids = collection.claim.collection.claims.ids[offset:page_size+offset]
try:
resolve_results, _, _ = await self.search_claims([], claim_ids=claim_ids)
except Exception as err:
if isinstance(err, asyncio.CancelledError): # TODO: remove when updated to 3.8
raise
log.exception("Resolve failed while looking up collection claim ids:")
return []
claims = []
for claim_id in claim_ids:
found = False
for txo in resolve_results:
if txo.claim_id == claim_id:
claims.append(txo)
found = True
break
if not found:
claims.append(None)
return claims
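
# A rough usage sketch, again illustrative only: `FullNode` stands in for any
# concrete Service subclass, `conf` for a Config instance, and the `text`
# constraint below is just an example keyword argument; none of these are
# defined in this module.
#
#     async def main():
#         service = FullNode(Ledger(conf))
#         await service.start()
#         try:
#             txos, total, censor = await service.search_claims([], text='hello')
#         finally:
#             await service.stop()
#
#     asyncio.run(main())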