2018-05-26 05:26:07 +02:00
|
|
|
import os
|
2018-07-21 20:12:29 +02:00
|
|
|
import json
|
2018-10-16 21:04:20 +02:00
|
|
|
import asyncio
|
2018-08-06 06:28:11 +02:00
|
|
|
import logging
|
2018-10-05 01:50:19 +02:00
|
|
|
from binascii import unhexlify
|
|
|
|
|
2018-09-26 04:40:52 +02:00
|
|
|
from datetime import datetime
|
2018-10-08 16:55:46 +02:00
|
|
|
from typing import Optional
|
2018-09-26 04:40:52 +02:00
|
|
|
|
2018-10-16 05:12:23 +02:00
|
|
|
from twisted.internet import defer
|
|
|
|
|
2018-11-04 08:52:58 +01:00
|
|
|
from lbrynet.schema.schema import SECP256k1
|
2018-11-04 07:24:41 +01:00
|
|
|
from torba.client.basemanager import BaseWalletManager
|
2018-12-05 15:17:36 +01:00
|
|
|
from torba.rpc.jsonrpc import CodeMessageError
|
2018-03-26 04:59:57 +02:00
|
|
|
|
2018-09-17 22:31:44 +02:00
|
|
|
from lbrynet.schema.claim import ClaimDict
|
2018-07-01 23:21:18 +02:00
|
|
|
|
2018-11-09 20:02:03 +01:00
|
|
|
from lbrynet.extras.wallet.ledger import MainNetLedger
|
|
|
|
from lbrynet.extras.wallet.account import BaseAccount, generate_certificate
|
|
|
|
from lbrynet.extras.wallet.transaction import Transaction
|
|
|
|
from lbrynet.extras.wallet.database import WalletDatabase
|
|
|
|
from lbrynet.extras.wallet.dewies import dewies_to_lbc
|
2018-06-14 21:18:36 +02:00
|
|
|
|
2018-09-25 15:41:31 +02:00
|
|
|
|
2018-08-06 06:28:11 +02:00
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
|
2018-03-26 04:59:57 +02:00
|
|
|
|
2018-08-23 05:19:04 +02:00
|
|
|
class ReservedPoints:
    """Record of an amount of points earmarked for a pending payment.

    ``identifier`` is an opaque handle for the reservation (typically the
    destination address) and ``amount`` is the number of points reserved.
    """

    def __init__(self, identifier, amount):
        self.identifier = identifier
        self.amount = amount
|
|
|
|
|
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class BackwardsCompatibleNetwork:
    """Shim exposing the legacy ``network`` interface on top of the new manager."""

    def __init__(self, manager):
        self.manager = manager

    def get_local_height(self):
        """Header height of the first ledger, or None when no ledger exists."""
        ledgers = list(self.manager.ledgers.values())
        if not ledgers:
            return None
        first = ledgers[0]
        assert isinstance(first, MainNetLedger)
        return first.headers.height

    def get_server_height(self):
        # No separate server-side view exists here; mirror the local height.
        return self.get_local_height()
|
|
|
|
|
|
|
|
|
|
|
|
class LbryWalletManager(BaseWalletManager):
    """LBRY-specific wallet manager built on torba's BaseWalletManager.

    Adds convenience properties and the legacy (lbryum-era) wallet API that
    the rest of lbrynet still expects.
    """

    @property
    def ledger(self) -> MainNetLedger:
        # Ledger of the default account; assumes a default account exists.
        return self.default_account.ledger

    @property
    def db(self) -> WalletDatabase:
        # Database of the default account's ledger.
        return self.ledger.db

    @property
    def wallet(self):
        # Legacy alias: older callers expect a `.wallet` attribute on the manager.
        return self

    @property
    def network(self):
        # Legacy shim; note a fresh wrapper object is created on every access.
        return BackwardsCompatibleNetwork(self)
|
2018-04-23 04:23:42 +02:00
|
|
|
|
|
|
|
    @property
    def use_encryption(self):
        """Whether the default account is serialized encrypted on disk."""
        return self.default_account.serialize_encrypted

    @property
    def is_first_run(self):
        # Hard-coded: this implementation always reports a first run.
        return True

    @property
    def is_wallet_unlocked(self):
        """True when the default account's keys are decrypted in memory."""
        return not self.default_account.encrypted

    def check_locked(self):
        """Return True when the default account is currently locked (encrypted)."""
        return self.default_account.encrypted
|
2018-09-25 15:41:31 +02:00
|
|
|
|
|
|
|
    def decrypt_account(self, account):
        """Permanently disable on-disk encryption for an already-unlocked account.

        Only flips the serialization flag and persists the wallets; the
        account must already be decrypted in memory. Returns True on success.
        """
        assert account.password is not None, "account is not unlocked"
        assert not account.encrypted, "account is not unlocked"
        account.serialize_encrypted = False
        self.save()
        return not account.encrypted and not account.serialize_encrypted
|
|
|
|
|
|
|
|
    def encrypt_account(self, password, account):
        """Encrypt *account* with *password*, persist, then unlock it again.

        The account remains usable in memory (it is re-unlocked after saving)
        but will be serialized encrypted from now on. Returns True on success.
        """
        assert not account.encrypted, "account is already encrypted"
        account.encrypt(password)
        account.serialize_encrypted = True
        self.save()
        self.unlock_account(password, account)
        return account.serialize_encrypted
|
2018-09-25 15:41:31 +02:00
|
|
|
|
|
|
|
    def unlock_account(self, password, account):
        """Decrypt a locked *account* in memory; returns True when unlocked."""
        assert account.encrypted, "account is not locked"
        account.decrypt(password)
        return not account.encrypted

    def lock_account(self, account):
        """Re-encrypt an unlocked account using its remembered password.

        Valid only for accounts that are serialized encrypted. Returns True
        when the account ends up locked.
        """
        assert account.password is not None, "account is already locked"
        assert not account.encrypted and account.serialize_encrypted, "account is not encrypted"
        account.encrypt(account.password)
        return account.encrypted
|
2018-05-26 05:26:07 +02:00
|
|
|
|
2018-08-17 16:35:56 +02:00
|
|
|
    @staticmethod
    def migrate_lbryum_to_torba(path):
        """Convert an on-disk lbryum wallet at *path* to the torba format.

        Returns (receiving_addresses, change_addresses) — the raw public keys
        found in the old wallet — or (None, None) when there is nothing to
        migrate. The old file is kept as a numbered backup and the new wallet
        is written atomically (temp file + fsync + rename).
        """
        if not os.path.exists(path):
            return None, None
        with open(path, 'r') as f:
            unmigrated_json = f.read()
            unmigrated = json.loads(unmigrated_json)
        # TODO: After several public releases of new torba based wallet, we can delete
        #       this lbryum->torba conversion code and require that users who still
        #       have old structured wallets install one of the earlier releases that
        #       still has the below conversion code.
        if 'master_public_keys' not in unmigrated:
            # Already in the new format (or not an lbryum wallet).
            return None, None
        total = unmigrated.get('addr_history')
        receiving_addresses, change_addresses = set(), set()
        for _, unmigrated_account in unmigrated.get('accounts', {}).items():
            receiving_addresses.update(map(unhexlify, unmigrated_account.get('receiving', [])))
            change_addresses.update(map(unhexlify, unmigrated_account.get('change', [])))
        # NOTE(review): len(total) raises TypeError when 'addr_history' is
        # missing — presumably the key is always present in lbryum wallets; confirm.
        log.info("Wallet migrator found %s receiving addresses and %s change addresses. %s in total on history.",
                 len(receiving_addresses), len(change_addresses), len(total))

        migrated_json = json.dumps({
            'version': 1,
            'name': 'My Wallet',
            'accounts': [{
                'version': 1,
                'name': 'Main Account',
                'ledger': 'lbc_mainnet',
                'encrypted': unmigrated['use_encryption'],
                'seed': unmigrated['seed'],
                'seed_version': unmigrated['seed_version'],
                'private_key': unmigrated['master_private_keys']['x/'],
                'public_key': unmigrated['master_public_keys']['x/'],
                'certificates': unmigrated.get('claim_certificates', {}),
                'address_generator': {
                    'name': 'deterministic-chain',
                    'receiving': {'gap': 20, 'maximum_uses_per_address': 1},
                    'change': {'gap': 6, 'maximum_uses_per_address': 1}
                }
            }]
        }, indent=4, sort_keys=True)
        # Preserve the original file permissions on the rewritten wallet.
        mode = os.stat(path).st_mode
        i = 1
        backup_path_template = os.path.join(os.path.dirname(path), "old_lbryum_wallet") + "_%i"
        while os.path.isfile(backup_path_template % i):
            i += 1
        os.rename(path, backup_path_template % i)
        temp_path = "{}.tmp.{}".format(path, os.getpid())
        with open(temp_path, "w") as f:
            f.write(migrated_json)
            f.flush()
            os.fsync(f.fileno())
        # Atomic replace so a crash cannot leave a half-written wallet.
        os.rename(temp_path, path)
        os.chmod(path, mode)
        return receiving_addresses, change_addresses
|
2018-08-17 16:35:56 +02:00
|
|
|
|
2018-05-26 05:26:07 +02:00
|
|
|
    @classmethod
    async def from_lbrynet_config(cls, settings, db):
        """Build a manager from lbrynet *settings*, migrating old wallets as needed.

        Maps the configured blockchain name to a ledger id, creates the wallet
        directory and default account if missing, and migrates lbryum-era
        addresses into the new deterministic chains. Returns the manager.
        """
        ledger_id = {
            'lbrycrd_main': 'lbc_mainnet',
            'lbrycrd_testnet': 'lbc_testnet',
            'lbrycrd_regtest': 'lbc_regtest'
        }[settings['blockchain_name']]
        ledger_config = {
            'auto_connect': True,
            'default_servers': settings['lbryum_servers'],
            'data_path': settings['lbryum_wallet_dir'],
            'use_keyring': settings['use_keyring'],
            #'db': db
        }

        wallets_directory = os.path.join(settings['lbryum_wallet_dir'], 'wallets')
        if not os.path.exists(wallets_directory):
            os.mkdir(wallets_directory)

        wallet_file_path = os.path.join(wallets_directory, 'default_wallet')

        # Returns (None, None) when there is no old-format wallet to migrate.
        receiving_addresses, change_addresses = cls.migrate_lbryum_to_torba(wallet_file_path)

        manager = cls.from_config({
            'ledgers': {ledger_id: ledger_config},
            'wallets': [wallet_file_path]
        })
        ledger = manager.get_or_create_ledger(ledger_id)
        if manager.default_account is None:
            log.info('Wallet at %s is empty, generating a default account.', wallet_file_path)
            manager.default_wallet.generate_account(ledger)
            manager.default_wallet.save()
        if receiving_addresses or change_addresses:
            if not os.path.exists(ledger.path):
                os.mkdir(ledger.path)
            await ledger.db.open()
            try:
                await manager._migrate_addresses(receiving_addresses, change_addresses)
            finally:
                await ledger.db.close()
        return manager
|
2018-10-05 01:50:19 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
    async def _migrate_addresses(self, receiving_addresses: set, change_addresses: set):
        """Regenerate the old wallet's addresses in the new deterministic chains.

        Generates as many receiving/change keys as the old wallet had and
        verifies every old address was reproduced. Migration is best-effort:
        failures are logged, never raised.
        """
        async with self.default_account.receiving.address_generator_lock:
            migrated_receiving = set((await self.default_account.receiving._generate_keys(0, len(receiving_addresses))))
        async with self.default_account.change.address_generator_lock:
            migrated_change = set((await self.default_account.change._generate_keys(0, len(change_addresses))))
        # The incoming sets hold raw public keys; convert to addresses to compare.
        receiving_addresses = set(map(self.default_account.ledger.public_key_to_address, receiving_addresses))
        change_addresses = set(map(self.default_account.ledger.public_key_to_address, change_addresses))
        if not any(change_addresses.difference(migrated_change)):
            log.info("Successfully migrated %s change addresses.", len(change_addresses))
        else:
            log.warning("Failed to migrate %s change addresses!",
                        len(set(change_addresses).difference(set(migrated_change))))
        if not any(receiving_addresses.difference(migrated_receiving)):
            log.info("Successfully migrated %s receiving addresses.", len(receiving_addresses))
        else:
            log.warning("Failed to migrate %s receiving addresses!",
                        len(set(receiving_addresses).difference(set(migrated_receiving))))
|
2018-05-26 05:26:07 +02:00
|
|
|
|
|
|
|
    def get_best_blockhash(self):
        """Hex hash of the most recent locally-known block header."""
        return self.ledger.headers.hash(self.ledger.headers.height).decode()

    def get_unused_address(self):
        """Get (or create) an unused receiving address of the default account."""
        return self.default_account.receiving.get_or_create_usable_address()

    def get_new_address(self):
        # Legacy alias kept for the old wallet API.
        return self.get_unused_address()

    def reserve_points(self, address, amount: int):
        """Earmark *amount* of points for a payment to *address*."""
        # TODO: check if we have enough to cover amount
        return ReservedPoints(address, amount)
|
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
    async def send_amount_to_address(self, amount: int, destination_address: bytes, account=None):
        """Create, broadcast and return a payment of *amount* (dewies) to *destination_address*."""
        account = account or self.default_account
        tx = await Transaction.pay(amount, destination_address, [account], account)
        await account.ledger.broadcast(tx)
        return tx
|
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
    async def send_claim_to_address(self, claim_id: str, destination_address: str, amount: Optional[int],
                                    account=None):
        """Transfer ownership of a claim to *destination_address* via an update tx.

        When *amount* is falsy the claim's current effective amount is reused.
        Raises NameError when the account owns no matching claim/update.
        """
        account = account or self.default_account
        claims = await account.get_claims(
            claim_name_type__any={'is_claim': 1, 'is_update': 1},  # exclude is_supports
            claim_id=claim_id
        )
        if not claims:
            raise NameError(f"Claim not found: {claim_id}")
        if not amount:
            amount = claims[0].get_estimator(self.ledger).effective_amount
        tx = await Transaction.update(
            claims[0], ClaimDict.deserialize(claims[0].script.values['claim']), amount,
            destination_address.encode(), [account], account
        )
        await self.ledger.broadcast(tx)
        return tx
|
|
|
|
|
2018-09-21 15:47:06 +02:00
|
|
|
    def send_points_to_address(self, reserved: ReservedPoints, amount: int, account=None):
        """Pay *amount* to the address stored in a prior reservation."""
        # The reservation identifier doubles as the destination address.
        destination_address: bytes = reserved.identifier.encode('latin1')
        return self.send_amount_to_address(amount, destination_address, account)
|
2018-05-26 05:26:07 +02:00
|
|
|
|
|
|
|
    def get_wallet_info_query_handler_factory(self):
        """Factory answering peers' 'lbrycrd_address' wallet-info queries."""
        return LBRYcrdAddressQueryHandlerFactory(self)

    def get_info_exchanger(self):
        """Requester that asks peers for their LBRYcrd payment address."""
        return LBRYcrdAddressRequester(self)
|
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
    async def resolve(self, *uris, **kwargs):
        """Resolve one or more LBRY URIs, mirroring successes into the legacy db.

        Keyword args: page (default 0), page_size (default 10), check_cache
        (currently unused). Returns the ledger's resolve result dict.
        """
        page = kwargs.get('page', 0)
        page_size = kwargs.get('page_size', 10)
        check_cache = kwargs.get('check_cache', False)  # TODO: put caching back (was force_refresh parameter)
        ledger: MainNetLedger = self.default_account.ledger
        results = await ledger.resolve(page, page_size, *uris)
        if 'error' not in results:
            # The legacy db is twisted-based; bridge its Deferred to asyncio.
            await self.old_db.save_claims_for_resolve([
                value for value in results.values() if 'error' not in value
            ]).asFuture(asyncio.get_event_loop())
        return results
|
2018-05-29 05:12:11 +02:00
|
|
|
|
2018-11-30 04:18:32 +01:00
|
|
|
    async def get_claims_for_name(self, name: str):
        """Fetch all claims for *name*, replacing each raw claim with its resolved form."""
        response = await self.ledger.network.get_claims_for_name(name)
        if 'claims' in response:
            # Re-resolve each claim by name#claim_id to get the full claim record.
            to_resolve = [(claim['name'] + '#' + claim['claim_id']) for claim in response['claims']]
            response['claims'] = [resolution['claim'] for resolution in (await self.resolve(*to_resolve)).values()]
        return response
|
2018-09-21 22:48:49 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def address_is_mine(self, unknown_address, account):
|
|
|
|
match = await self.ledger.db.get_address(address=unknown_address, account=account)
|
2018-10-08 16:41:07 +02:00
|
|
|
if match is not None:
|
|
|
|
return True
|
2018-09-21 15:47:06 +02:00
|
|
|
return False
|
2018-05-29 05:12:11 +02:00
|
|
|
|
2018-12-04 01:40:18 +01:00
|
|
|
    async def get_transaction(self, txid):
        """Fetch a transaction by id, from the local db or else from the network.

        Returns the transaction object, or an error dict
        ``{'success': False, 'code': ..., 'message': ...}`` when the network
        lookup fails with a protocol error.
        """
        tx = await self.db.get_transaction(txid=txid)
        if not tx:
            try:
                _raw = await self.ledger.network.get_transaction(txid)
            except CodeMessageError as e:
                return {'success': False, 'code': e.code, 'message': e.message}
            # this is a workaround for the current protocol. Should be fixed when lbryum support is over and we
            # are able to use the modern get_transaction call, which accepts verbose to show height and other fields
            height = await self.ledger.network.get_transaction_height(txid)
            tx = self.ledger.transaction_class(unhexlify(_raw))
            if tx and height > 0:
                await self.ledger.maybe_verify_transaction(tx, height + 1)  # off by one from server side, yes...
        return tx
|
2018-09-19 15:58:50 +02:00
|
|
|
|
2018-09-26 04:40:52 +02:00
|
|
|
    @staticmethod
    async def get_history(account: BaseAccount, **constraints):
        """Build the legacy transaction-history list for *account*.

        Each entry describes one transaction: txid, timestamp/date,
        confirmations, net value/fee from this account's perspective, and
        per-output details for claims, updates, supports and abandons.
        Unconfirmed transactions (height <= 0) get None timestamps/dates.
        """
        headers = account.ledger.headers
        txs = await account.get_transactions(**constraints)
        history = []
        for tx in txs:
            ts = headers[tx.height]['timestamp'] if tx.height > 0 else None
            item = {
                'txid': tx.id,
                'timestamp': ts,
                # isoformat(' ')[:-3] drops the microsecond tail down to milliseconds.
                'date': datetime.fromtimestamp(ts).isoformat(' ')[:-3] if tx.height > 0 else None,
                'confirmations': headers.height - tx.height if tx.height > 0 else 0,
                'claim_info': [],
                'update_info': [],
                'support_info': [],
                'abandon_info': []
            }
            # True when every input is funded by this account (we initiated the tx).
            is_my_inputs = all([txi.is_my_account for txi in tx.inputs])
            if is_my_inputs:
                # fees only matter if we are the ones paying them
                item['value'] = dewies_to_lbc(tx.net_account_balance+tx.fee)
                item['fee'] = dewies_to_lbc(-tx.fee)
            else:
                # someone else paid the fees
                item['value'] = dewies_to_lbc(tx.net_account_balance)
                item['fee'] = '0.0'
            for txo in tx.my_claim_outputs:
                item['claim_info'].append({
                    'address': txo.get_address(account.ledger),
                    'balance_delta': dewies_to_lbc(-txo.amount),
                    'amount': dewies_to_lbc(txo.amount),
                    'claim_id': txo.claim_id,
                    'claim_name': txo.claim_name,
                    'nout': txo.position
                })
            for txo in tx.my_update_outputs:
                if is_my_inputs:  # updating my own claim
                    # Find the previous incarnation of this claim among the inputs
                    # so the balance delta reflects the change in staked amount.
                    previous = None
                    for txi in tx.inputs:
                        if txi.txo_ref.txo is not None:
                            other_txo = txi.txo_ref.txo
                            if (other_txo.is_claim or other_txo.script.is_support_claim) \
                                    and other_txo.claim_id == txo.claim_id:
                                previous = other_txo
                                break
                    assert previous is not None,\
                        "Invalid claim update state, expected to find previous claim in input."
                    item['update_info'].append({
                        'address': txo.get_address(account.ledger),
                        'balance_delta': dewies_to_lbc(previous.amount-txo.amount),
                        'amount': dewies_to_lbc(txo.amount),
                        'claim_id': txo.claim_id,
                        'claim_name': txo.claim_name,
                        'nout': txo.position
                    })
                else:  # someone sent us their claim
                    item['update_info'].append({
                        'address': txo.get_address(account.ledger),
                        'balance_delta': dewies_to_lbc(0),
                        'amount': dewies_to_lbc(txo.amount),
                        'claim_id': txo.claim_id,
                        'claim_name': txo.claim_name,
                        'nout': txo.position
                    })
            for txo in tx.my_support_outputs:
                item['support_info'].append({
                    'address': txo.get_address(account.ledger),
                    # A support we funded is an outflow; one we received is a tip (inflow).
                    'balance_delta': dewies_to_lbc(txo.amount if not is_my_inputs else -txo.amount),
                    'amount': dewies_to_lbc(txo.amount),
                    'claim_id': txo.claim_id,
                    'claim_name': txo.claim_name,
                    'is_tip': not is_my_inputs,
                    'nout': txo.position
                })
            for txo in tx.other_support_outputs:
                item['support_info'].append({
                    'address': txo.get_address(account.ledger),
                    'balance_delta': dewies_to_lbc(-txo.amount),
                    'amount': dewies_to_lbc(txo.amount),
                    'claim_id': txo.claim_id,
                    'claim_name': txo.claim_name,
                    'is_tip': is_my_inputs,
                    'nout': txo.position
                })
            for txo in tx.my_abandon_outputs:
                item['abandon_info'].append({
                    'address': txo.get_address(account.ledger),
                    'balance_delta': dewies_to_lbc(txo.amount),
                    'amount': dewies_to_lbc(txo.amount),
                    'claim_id': txo.claim_id,
                    'claim_name': txo.claim_name,
                    'nout': txo.position
                })
            history.append(item)
        return history
|
2018-05-29 05:12:11 +02:00
|
|
|
|
2018-09-26 04:40:52 +02:00
|
|
|
    @staticmethod
    def get_utxos(account: BaseAccount):
        """Return the unspent transaction outputs of *account*."""
        return account.get_utxos()
|
2018-09-19 15:58:50 +02:00
|
|
|
|
2018-10-18 01:07:17 +02:00
|
|
|
    async def claim_name(self, account, name, amount, claim_dict, certificate=None, claim_address=None):
        """Create or update a claim for *name*, broadcast it, and return the tx.

        When *certificate* is given the claim is signed into that channel.
        An existing single claim with this name is updated in place; more than
        one existing claim raises NameError. The result is also mirrored into
        the legacy database.
        """
        claim = ClaimDict.load_dict(claim_dict)
        if not claim_address:
            claim_address = await account.receiving.get_or_create_usable_address()
        if certificate:
            claim = claim.sign(
                certificate.private_key, claim_address, certificate.claim_id, curve=SECP256k1
            )
        existing_claims = await account.get_claims(
            claim_name_type__any={'is_claim': 1, 'is_update': 1},  # exclude is_supports
            claim_name=name
        )
        if len(existing_claims) == 0:
            tx = await Transaction.claim(
                name, claim, amount, claim_address, [account], account
            )
        elif len(existing_claims) == 1:
            tx = await Transaction.update(
                existing_claims[0], claim, amount, claim_address, [account], account
            )
        else:
            raise NameError(f"More than one other claim exists with the name '{name}'.")
        await account.ledger.broadcast(tx)
        # Bridge the twisted-based legacy db call to asyncio.
        await self.old_db.save_claims([self._old_get_temp_claim_info(
            tx, tx.outputs[0], claim_address, claim_dict, name, dewies_to_lbc(amount)
        )]).asFuture(asyncio.get_event_loop())
        # TODO: release reserved tx outputs in case anything fails by this point
        return tx
|
2018-07-05 04:16:02 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
    async def support_claim(self, claim_name, claim_id, amount, account):
        """Create and broadcast a support for a claim, mirroring it into the legacy db."""
        holding_address = await account.receiving.get_or_create_usable_address()
        tx = await Transaction.support(claim_name, claim_id, amount, holding_address, [account], account)
        await account.ledger.broadcast(tx)
        await self.old_db.save_supports(claim_id, [{
            'txid': tx.id,
            # NOTE(review): 'nout' is taken from tx.position, not the support
            # output's position — confirm this matches the legacy db's expectation.
            'nout': tx.position,
            'address': holding_address,
            'claim_id': claim_id,
            'amount': dewies_to_lbc(amount)
        }]).asFuture(asyncio.get_event_loop())
        return tx
|
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
    async def tip_claim(self, amount, claim_id, account):
        """Send a tip: a support paid to the claim owner's own address."""
        claim_to_tip = await self.get_claim_by_claim_id(claim_id)
        tx = await Transaction.support(
            claim_to_tip['name'], claim_id, amount, claim_to_tip['address'], [account], account
        )
        await account.ledger.broadcast(tx)
        await self.old_db.save_supports(claim_id, [{
            'txid': tx.id,
            # NOTE(review): 'nout' is taken from tx.position, not the support
            # output's position — confirm this matches the legacy db's expectation.
            'nout': tx.position,
            'address': claim_to_tip['address'],
            'claim_id': claim_id,
            'amount': dewies_to_lbc(amount)
        }]).asFuture(asyncio.get_event_loop())
        return tx
|
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
    async def abandon_claim(self, claim_id, txid, nout, account):
        """Abandon a claim identified by claim_id or txid:nout; returns the abandon tx."""
        claim = await account.get_claim(claim_id=claim_id, txid=txid, nout=nout)
        if not claim:
            raise Exception('No claim found for the specified claim_id or txid:nout')

        tx = await Transaction.abandon(claim, [account], account)
        await account.ledger.broadcast(tx)
        # TODO: release reserved tx outputs in case anything fails by this point
        return tx
|
2018-08-01 04:59:51 +02:00
|
|
|
|
2018-10-18 01:07:17 +02:00
|
|
|
    async def claim_new_channel(self, channel_name, amount, account):
        """Create a new channel (certificate claim), broadcast it, store its key.

        The generated certificate private key is attached to the account so it
        can later sign claims into this channel. The claim is also mirrored
        into the legacy database.
        """
        address = await account.receiving.get_or_create_usable_address()
        cert, key = generate_certificate()
        tx = await Transaction.claim(channel_name, cert, amount, address, [account], account)
        await account.ledger.broadcast(tx)
        account.add_certificate_private_key(tx.outputs[0].ref, key.decode())
        # TODO: release reserved tx outputs in case anything fails by this point

        await self.old_db.save_claims([self._old_get_temp_claim_info(
            tx, tx.outputs[0], address, cert, channel_name, dewies_to_lbc(amount)
        )]).asFuture(asyncio.get_event_loop())
        return tx
|
2018-06-12 17:53:29 +02:00
|
|
|
|
2018-11-20 01:23:23 +01:00
|
|
|
def _old_get_temp_claim_info(self, tx, txo, address, claim_dict, name, bid):
|
|
|
|
return {
|
|
|
|
"claim_id": txo.claim_id,
|
|
|
|
"name": name,
|
|
|
|
"amount": bid,
|
|
|
|
"address": address,
|
|
|
|
"txid": tx.id,
|
|
|
|
"nout": txo.position,
|
|
|
|
"value": claim_dict,
|
|
|
|
"height": -1,
|
|
|
|
"claim_sequence": -1,
|
|
|
|
}
|
|
|
|
|
2018-10-03 18:00:21 +02:00
|
|
|
    def get_certificates(self, private_key_accounts, exclude_without_key=True, **constraints):
        """Query channel certificates from the wallet db for the given accounts."""
        return self.db.get_certificates(
            private_key_accounts=private_key_accounts,
            exclude_without_key=exclude_without_key,
            **constraints
        )
|
2018-07-12 18:14:47 +02:00
|
|
|
|
2018-07-10 06:30:13 +02:00
|
|
|
    def update_peer_address(self, peer, address):
        pass  # TODO: Data payments is disabled

    def get_unused_address_for_peer(self, peer):
        # TODO: Data payments is disabled
        return self.get_unused_address()

    def add_expected_payment(self, peer, amount):
        pass  # TODO: Data payments is disabled

    def send_points(self, reserved_points, amount):
        # NOTE(review): the deferred is created but neither returned nor
        # awaited, so callers get None — presumably intentional while data
        # payments are disabled; confirm.
        defer.succeed(True)  # TODO: Data payments is disabled

    def cancel_point_reservation(self, reserved_points):
        pass  # fixme: disabled for now.

    def save(self):
        """Persist every wallet managed by this instance to disk."""
        for wallet in self.wallets:
            wallet.save()
|
|
|
|
|
2018-08-31 19:12:13 +02:00
|
|
|
    def get_block(self, block_hash=None, height=None):
        """Fetch a block from the network by hash, defaulting to the local tip."""
        if height is None:
            height = self.ledger.headers.height
        if block_hash is None:
            block_hash = self.ledger.headers.hash(height).decode()
        return self.ledger.network.get_block(block_hash)

    def get_claim_by_claim_id(self, claim_id):
        """Look up a claim by its claim id via the ledger."""
        return self.ledger.get_claim_by_claim_id(claim_id)

    def get_claim_by_outpoint(self, txid, nout):
        """Look up a claim by its transaction outpoint via the ledger."""
        return self.ledger.get_claim_by_outpoint(txid, nout)
|
|
|
|
|
2018-07-12 18:14:47 +02:00
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class ClientRequest:
    """A single request payload to send to a peer.

    ``response_identifier`` (optional) names the key expected back in the
    peer's response.
    """

    def __init__(self, request_dict, response_identifier=None):
        self.request_dict = request_dict
        self.response_identifier = response_identifier
|
|
|
|
|
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class LBRYcrdAddressRequester:
    """Asks peers for their LBRYcrd payment address over the wallet-info protocol."""

    def __init__(self, wallet):
        self.wallet = wallet
        self._protocols = []

    def send_next_request(self, peer, protocol):
        """Send a one-time address request to *protocol*; returns a fired deferred.

        A protocol is only ever asked once; repeat calls yield False.
        """
        if protocol in self._protocols:
            return defer.succeed(False)
        request = ClientRequest({'lbrycrd_address': True}, 'lbrycrd_address')
        deferred = protocol.add_request(request)
        deferred.addCallback(self._handle_address_response, peer, request, protocol)
        deferred.addErrback(self._request_failed, peer)
        self._protocols.append(protocol)
        return defer.succeed(True)

    def _handle_address_response(self, response_dict, peer, request, protocol):
        # Record the address the peer reported for future payments.
        if request.response_identifier not in response_dict:
            raise ValueError(
                f"Expected {request.response_identifier} in response but did not get it")
        assert protocol in self._protocols, "Responding protocol is not in our list of protocols"
        address = response_dict[request.response_identifier]
        self.wallet.update_peer_address(peer, address)

    def _request_failed(self, error, peer):
        raise Exception(
            "A peer failed to send a valid public key response. Error: {}, peer: {}".format(
                error.getErrorMessage(), str(peer)
            )
        )
|
2018-05-26 05:26:07 +02:00
|
|
|
|
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class LBRYcrdAddressQueryHandlerFactory:
    """Builds query handlers that answer peers' 'lbrycrd_address' queries."""

    def __init__(self, wallet):
        self.wallet = wallet

    def build_query_handler(self):
        # Each peer session gets its own handler instance.
        return LBRYcrdAddressQueryHandler(self.wallet)

    def get_primary_query_identifier(self):
        return 'lbrycrd_address'

    def get_description(self):
        return "LBRYcrd Address - an address for receiving payments via LBRYcrd"
|
|
|
|
|
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class LBRYcrdAddressQueryHandler:
    """Answers a peer's 'lbrycrd_address' query with one of our addresses."""

    def __init__(self, wallet):
        self.wallet = wallet
        self.query_identifiers = ['lbrycrd_address']
        # Address handed out for this session; None until a query arrives.
        self.address = None
        self.peer = None

    def register_with_request_handler(self, request_handler, peer):
        """Bind this handler to *peer* and register its query identifiers."""
        self.peer = peer
        request_handler.register_query_handler(self, self.query_identifiers)

    @defer.inlineCallbacks
    def handle_queries(self, queries):
        """Respond to an address query; bridges the asyncio wallet call into twisted."""
        if self.query_identifiers[0] in queries:
            future = self.wallet.get_unused_address_for_peer(self.peer)
            address = yield defer.Deferred.fromFuture(asyncio.ensure_future(future))
            self.address = address
            fields = {'lbrycrd_address': address}
            return fields
        if self.address is None:
            raise Exception("Expected a request for an address, but did not receive one")
        else:
            return {}
|