2018-05-26 05:26:07 +02:00
|
|
|
import os
|
2018-07-21 20:12:29 +02:00
|
|
|
import json
|
2018-08-06 06:28:11 +02:00
|
|
|
import logging
|
2018-03-26 04:59:57 +02:00
|
|
|
from twisted.internet import defer
|
|
|
|
|
2018-07-29 06:16:57 +02:00
|
|
|
from torba.basemanager import BaseWalletManager
|
2018-03-26 04:59:57 +02:00
|
|
|
|
2018-07-12 19:23:18 +02:00
|
|
|
from lbryschema.claim import ClaimDict
|
2018-07-01 23:21:18 +02:00
|
|
|
|
2018-07-29 06:16:57 +02:00
|
|
|
from .ledger import MainNetLedger
|
2018-07-05 04:16:02 +02:00
|
|
|
from .account import generate_certificate
|
|
|
|
from .transaction import Transaction
|
2018-07-29 06:16:57 +02:00
|
|
|
from .database import WalletDatabase
|
2018-06-14 21:18:36 +02:00
|
|
|
|
2018-08-06 06:28:11 +02:00
|
|
|
# Module-level logger for wallet-manager events.
log = logging.getLogger(__name__)
|
|
|
|
|
2018-03-26 04:59:57 +02:00
|
|
|
|
2018-08-23 05:19:04 +02:00
|
|
|
class ReservedPoints:
    """Token recording that *amount* points were reserved for *identifier*.

    Plain value holder used by the points-reservation API; carries no logic.
    """

    def __init__(self, identifier, amount):
        # Typically an address; opaque to this class.
        self.identifier = identifier
        self.amount = amount
|
|
|
|
|
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class BackwardsCompatibleNetwork:
    """Adapter exposing the old lbrynet 'network' interface over a torba manager."""

    def __init__(self, manager):
        self.manager = manager

    def get_local_height(self):
        """Return the local header-chain height of the first ledger.

        NOTE(review): yields None when the manager has no ledgers — confirm
        callers tolerate that.
        """
        first_ledger = next(iter(self.manager.ledgers.values()), None)
        if first_ledger is not None:
            assert isinstance(first_ledger, MainNetLedger)
            return first_ledger.headers.height

    def get_server_height(self):
        """Old-API parity: reports the local height as the server height."""
        return self.get_local_height()
|
|
|
|
|
|
|
|
|
|
|
|
class LbryWalletManager(BaseWalletManager):
|
|
|
|
|
2018-07-12 18:14:47 +02:00
|
|
|
@property
def ledger(self) -> MainNetLedger:
    """The ledger backing the default account."""
    return self.default_account.ledger
|
|
|
|
|
|
|
|
@property
def db(self) -> WalletDatabase:
    """The wallet database of the default account's ledger."""
    return self.ledger.db
|
|
|
|
|
2018-05-26 05:26:07 +02:00
|
|
|
@property
def wallet(self):
    """Backwards-compatible alias: the manager doubles as the old 'wallet' object."""
    return self
|
|
|
|
|
|
|
|
@property
def network(self):
    """Old-API shim: returns a fresh BackwardsCompatibleNetwork adapter on each access."""
    return BackwardsCompatibleNetwork(self)
|
2018-04-23 04:23:42 +02:00
|
|
|
|
|
|
|
@property
def use_encryption(self):
    """Whether wallet encryption is active; not implemented yet, so always False."""
    # TODO: implement this
    return False
|
2018-03-26 04:59:57 +02:00
|
|
|
|
|
|
|
@property
def is_first_run(self):
    """Old-API stub: always reports a first run.

    NOTE(review): hard-coded True — confirm downstream first-run logic tolerates this.
    """
    return True
|
|
|
|
|
2018-08-22 15:41:59 +02:00
|
|
|
@property
def is_wallet_unlocked(self):
    """Old-API stub: locking is not implemented, so the wallet is always unlocked."""
    return True
|
|
|
|
|
2018-05-26 05:26:07 +02:00
|
|
|
def check_locked(self):
    """Old-API stub: locking is not implemented; fires a Deferred with False."""
    return defer.succeed(False)
|
|
|
|
|
2018-08-17 16:35:56 +02:00
|
|
|
@staticmethod
def migrate_lbryum_to_torba(path):
    """Convert an old lbryum wallet file at *path* into torba's format, in place.

    The original file is kept next to it as ``old_lbryum_wallet_N``. The new
    file is written to a temp file, fsync'd, then renamed over *path* so a
    crash cannot leave a partial wallet. No-op if *path* does not exist or
    is already in the new format (no 'master_public_keys' key).

    Fix vs. original: files are read/written with an explicit utf-8 encoding
    instead of the platform default, which is the correct encoding for JSON.
    """
    if not os.path.exists(path):
        return
    with open(path, 'r', encoding='utf-8') as f:
        unmigrated = json.load(f)
    # TODO: After several public releases of new torba based wallet, we can delete
    #       this lbryum->torba conversion code and require that users who still
    #       have old structured wallets install one of the earlier releases that
    #       still has the below conversion code.
    if 'master_public_keys' not in unmigrated:
        return
    migrated_json = json.dumps({
        'version': 1,
        'name': 'My Wallet',
        'accounts': [{
            'version': 1,
            'name': 'Main Account',
            'ledger': 'lbc_mainnet',
            'encrypted': unmigrated['use_encryption'],
            'seed': unmigrated['seed'],
            'seed_version': unmigrated['seed_version'],
            'private_key': unmigrated['master_private_keys']['x/'],
            'public_key': unmigrated['master_public_keys']['x/'],
            'certificates': unmigrated.get('claim_certificates', {}),
            'address_generator': {
                'name': 'deterministic-chain',
                'receiving': {'gap': 20, 'maximum_uses_per_address': 2},
                'change': {'gap': 6, 'maximum_uses_per_address': 2}
            }
        }]
    }, indent=4, sort_keys=True)
    # Preserve the original file's permissions on the migrated file.
    mode = os.stat(path).st_mode
    i = 1
    backup_path_template = os.path.join(os.path.dirname(path), "old_lbryum_wallet") + "_%i"
    while os.path.isfile(backup_path_template % i):
        i += 1
    os.rename(path, backup_path_template % i)
    temp_path = "%s.tmp.%s" % (path, os.getpid())
    with open(temp_path, "w", encoding='utf-8') as f:
        f.write(migrated_json)
        f.flush()
        os.fsync(f.fileno())
    # rename-over is atomic on POSIX: readers never observe a partial wallet.
    os.rename(temp_path, path)
    os.chmod(path, mode)
|
|
|
|
|
2018-05-26 05:26:07 +02:00
|
|
|
@classmethod
def from_lbrynet_config(cls, settings, db):
    """Build a wallet manager from a legacy lbrynet settings object.

    Maps the old blockchain_name to a torba ledger id, migrates any old
    lbryum wallet found at the default path, and guarantees the resulting
    manager has at least one account.

    Fix vs. original: ``os.makedirs(..., exist_ok=True)`` replaces the
    ``os.path.exists`` + ``os.mkdir`` pair, removing the check-then-create
    race and also creating missing parent directories.
    """
    ledger_id = {
        'lbrycrd_main': 'lbc_mainnet',
        'lbrycrd_testnet': 'lbc_testnet',
        'lbrycrd_regtest': 'lbc_regtest'
    }[settings['blockchain_name']]
    ledger_config = {
        'auto_connect': True,
        'default_servers': settings['lbryum_servers'],
        'data_path': settings['lbryum_wallet_dir'],
        'use_keyring': settings['use_keyring'],
        #'db': db
    }
    wallets_directory = os.path.join(settings['lbryum_wallet_dir'], 'wallets')
    os.makedirs(wallets_directory, exist_ok=True)
    wallet_file_path = os.path.join(wallets_directory, 'default_wallet')
    # Convert an old-format lbryum wallet in place before loading it.
    cls.migrate_lbryum_to_torba(wallet_file_path)
    manager = cls.from_config({
        'ledgers': {ledger_id: ledger_config},
        'wallets': [wallet_file_path]
    })
    if manager.default_account is None:
        ledger = manager.get_or_create_ledger('lbc_mainnet')
        log.info('Wallet at %s is empty, generating a default account.', wallet_file_path)
        manager.default_wallet.generate_account(ledger)
        manager.default_wallet.save()
    return manager
|
2018-05-26 05:26:07 +02:00
|
|
|
|
|
|
|
def get_best_blockhash(self):
    """Old-API stub: fires a Deferred with an empty string instead of a real hash."""
    return defer.succeed('')
|
|
|
|
|
|
|
|
def get_unused_address(self):
    """Deferred firing an unused (or reusable) receiving address of the default account."""
    receiving = self.default_account.receiving
    return receiving.get_or_create_usable_address()
|
2018-05-26 05:26:07 +02:00
|
|
|
|
2018-05-29 05:12:11 +02:00
|
|
|
def get_new_address(self):
    """Old-API alias for :meth:`get_unused_address`."""
    return self.get_unused_address()
|
|
|
|
|
2018-08-03 18:31:50 +02:00
|
|
|
def list_addresses(self):
    """All addresses belonging to the default account."""
    return self.default_account.get_addresses()
|
|
|
|
|
2018-05-26 05:26:07 +02:00
|
|
|
def reserve_points(self, address, amount):
    """Create a ReservedPoints token for *address*/*amount* (no balance check yet)."""
    # TODO: check if we have enough to cover amount
    return ReservedPoints(address, amount)
|
|
|
|
|
2018-08-23 05:19:04 +02:00
|
|
|
@defer.inlineCallbacks
def send_amount_to_address(self, amount: int, destination_address: bytes):
    """Pay *amount* to *destination_address* from the default account and broadcast.

    Returns (via Deferred) the broadcast Transaction.
    """
    funding_account = self.default_account
    tx = yield Transaction.pay(amount, destination_address, [funding_account], funding_account)
    yield funding_account.ledger.broadcast(tx)
    return tx
|
|
|
|
|
|
|
|
def send_points_to_address(self, reserved: ReservedPoints, amount: int):
    """Send *amount* to the address recorded in the *reserved* token."""
    # The identifier is a str address; the send API expects bytes.
    return self.send_amount_to_address(amount, reserved.identifier.encode('latin1'))
|
2018-05-26 05:26:07 +02:00
|
|
|
|
|
|
|
def get_wallet_info_query_handler_factory(self):
    """Factory producing query handlers that answer peers' address queries."""
    return LBRYcrdAddressQueryHandlerFactory(self)
|
|
|
|
|
|
|
|
def get_info_exchanger(self):
    """Requester used to ask peers for their LBRYcrd payment address."""
    return LBRYcrdAddressRequester(self)
|
|
|
|
|
2018-08-16 01:23:06 +02:00
|
|
|
@defer.inlineCallbacks
def resolve(self, *uris, **kwargs):
    """Resolve *uris* via the ledger and mirror successful results into the legacy db.

    Keyword args: ``page`` (default 0), ``page_size`` (default 10);
    ``check_cache`` is accepted but currently unused.
    Returns (via Deferred) the raw resolution mapping.
    """
    page = kwargs.get('page', 0)
    page_size = kwargs.get('page_size', 10)
    _ = kwargs.get('check_cache', False)  # TODO: put caching back (was force_refresh parameter)
    ledger = self.default_account.ledger  # type: MainNetLedger
    resolutions = yield ledger.resolve(page, page_size, *uris)
    # Only entries that resolved cleanly are persisted to the old database.
    successful = (entry for entry in resolutions.values() if 'error' not in entry)
    yield self.old_db.save_claims_for_resolve(successful)
    defer.returnValue(resolutions)
|
2018-05-29 05:12:11 +02:00
|
|
|
|
|
|
|
def get_name_claims(self):
    """Old-API stub: fires a Deferred with an empty claim list."""
    return defer.succeed([])
|
|
|
|
|
|
|
|
def address_is_mine(self, address):
    """Old-API stub: optimistically reports every address as owned.

    NOTE(review): hard-coded True — confirm callers tolerate false positives.
    """
    return defer.succeed(True)
|
|
|
|
|
|
|
|
def get_history(self):
    """Old-API stub: fires a Deferred with an empty transaction history."""
    return defer.succeed([])
|
|
|
|
|
2018-07-12 05:18:59 +02:00
|
|
|
@defer.inlineCallbacks
def claim_name(self, name, amount, claim_dict, certificate=None, claim_address=None):
    """Create or update a name claim and mirror it into the legacy database.

    If the default account holds exactly one unspent claim for *name*, it is
    spent and updated in place; if none exists a new claim is made; more than
    one raises NameError. Returns (via Deferred) the broadcast Transaction.
    """
    account = self.default_account
    claim = ClaimDict.load_dict(claim_dict)
    if not claim_address:
        claim_address = yield account.receiving.get_or_create_usable_address()
    if certificate:
        # Sign the claim so it is published under the certificate's channel.
        claim = claim.sign(
            certificate.private_key, claim_address, certificate.claim_id
        )
    existing_claims = yield account.get_unspent_outputs(include_claims=True, claim_name=name)
    if len(existing_claims) == 0:
        tx = yield Transaction.claim(
            name, claim, amount, claim_address, [account], account
        )
    elif len(existing_claims) == 1:
        # One prior claim: update it rather than creating a competing claim.
        tx = yield Transaction.update(
            existing_claims[0], claim, amount, claim_address, [account], account
        )
    else:
        raise NameError("More than one other claim exists with the name '{}'.".format(name))
    yield account.ledger.broadcast(tx)
    # Keep the pre-torba database in sync so legacy code paths can see the claim.
    yield self.old_db.save_claims([self._old_get_temp_claim_info(
        tx, tx.outputs[0], claim_address, claim_dict, name, amount
    )])
    # TODO: release reserved tx outputs in case anything fails by this point
    defer.returnValue(tx)
|
2018-07-05 04:16:02 +02:00
|
|
|
|
2018-07-12 21:44:07 +02:00
|
|
|
def _old_get_temp_claim_info(self, tx, txo, address, claim_dict, name, bid):
|
|
|
|
return {
|
2018-08-04 18:10:41 +02:00
|
|
|
"claim_id": txo.claim_id,
|
2018-07-12 21:44:07 +02:00
|
|
|
"name": name,
|
|
|
|
"amount": bid,
|
2018-07-17 05:32:37 +02:00
|
|
|
"address": address,
|
|
|
|
"txid": tx.id,
|
2018-07-16 00:09:35 +02:00
|
|
|
"nout": txo.position,
|
2018-07-12 21:44:07 +02:00
|
|
|
"value": claim_dict,
|
|
|
|
"height": -1,
|
|
|
|
"claim_sequence": -1,
|
|
|
|
}
|
|
|
|
|
2018-08-29 21:43:05 +02:00
|
|
|
@defer.inlineCallbacks
def support_claim(self, claim_name, claim_id, amount, account):
    """Create and broadcast a support for *claim_id*, funded by *account*.

    The support output pays to one of *account*'s own receiving addresses.
    Returns (via Deferred) the broadcast Transaction.
    """
    support_address = yield account.receiving.get_or_create_usable_address()
    tx = yield Transaction.support(claim_name, claim_id, amount, support_address, [account], account)
    yield account.ledger.broadcast(tx)
    return tx
|
|
|
|
|
2018-08-31 22:49:55 +02:00
|
|
|
@defer.inlineCallbacks
def tip_claim(self, amount, claim_id, account):
    """Tip a claim: a support whose output pays the claim owner's address.

    Returns (via Deferred) the broadcast Transaction.
    """
    target = yield self.get_claim_by_claim_id(claim_id)
    tx = yield Transaction.support(
        target['name'], claim_id, amount, target['address'], [account], account
    )
    yield account.ledger.broadcast(tx)
    return tx
|
|
|
|
|
2018-08-01 04:59:51 +02:00
|
|
|
@defer.inlineCallbacks
def abandon_claim(self, claim_id, txid, nout):
    """Abandon the claim identified by *claim_id* and broadcast the spend.

    NOTE(review): *txid* and *nout* are accepted but unused here — presumably
    kept for old-API compatibility; confirm against callers.
    """
    default_account = self.default_account
    claim = yield default_account.get_claim(claim_id)
    tx = yield Transaction.abandon(claim, [default_account], default_account)
    yield default_account.ledger.broadcast(tx)
    # TODO: release reserved tx outputs in case anything fails by this point
    defer.returnValue(tx)
|
|
|
|
|
2018-07-05 04:16:02 +02:00
|
|
|
@defer.inlineCallbacks
def claim_new_channel(self, channel_name, amount):
    """Claim a new channel (certificate claim) and store its signing key.

    Returns (via Deferred) the broadcast Transaction; the certificate's
    private key is attached to the account, keyed by the claim output.
    """
    account = self.default_account
    holding_address = yield account.receiving.get_or_create_usable_address()
    cert, key = generate_certificate()
    tx = yield Transaction.claim(channel_name, cert, amount, holding_address, [account], account)
    yield account.ledger.broadcast(tx)
    account.add_certificate_private_key(tx.outputs[0].ref, key.decode())
    # TODO: release reserved tx outputs in case anything fails by this point
    defer.returnValue(tx)
|
2018-06-12 17:53:29 +02:00
|
|
|
|
2018-08-04 03:39:48 +02:00
|
|
|
def channel_list(self):
    """Channel (certificate) claims held by the default account."""
    return self.default_account.get_channels()
|
|
|
|
|
2018-07-12 18:14:47 +02:00
|
|
|
def get_certificates(self, name):
    """Certificates named *name* across all accounts, restricted to those with signing keys."""
    return self.db.get_certificates(name, self.accounts, exclude_without_key=True)
|
2018-07-12 18:14:47 +02:00
|
|
|
|
2018-07-10 06:30:13 +02:00
|
|
|
def update_peer_address(self, peer, address):
    """Record a payment address for *peer* — intentionally a no-op."""
    pass # TODO: Data payments is disabled
|
|
|
|
|
|
|
|
def get_unused_address_for_peer(self, peer):
    """With data payments disabled, every peer gets the generic unused address."""
    # TODO: Data payments is disabled
    return self.get_unused_address()
|
|
|
|
|
|
|
|
def add_expected_payment(self, peer, amount):
    """Register an expected payment from *peer* — intentionally a no-op."""
    pass # TODO: Data payments is disabled
|
|
|
|
|
|
|
|
def send_points(self, reserved_points, amount):
    """Stub for sending reserved points; data payments are disabled.

    BUG FIX: the original built ``defer.succeed(True)`` but never returned
    it, so callers always received None instead of a fireable Deferred.
    """
    return defer.succeed(True)  # TODO: Data payments is disabled
|
|
|
|
|
|
|
|
def cancel_point_reservation(self, reserved_points):
    """Release a points reservation — intentionally a no-op."""
    pass # fixme: disabled for now.
|
2018-05-26 05:26:07 +02:00
|
|
|
|
2018-07-12 18:18:58 +02:00
|
|
|
def save(self):
    """Persist every wallet managed by this instance to disk."""
    for managed_wallet in self.wallets:
        managed_wallet.save()
|
|
|
|
|
2018-08-31 19:12:13 +02:00
|
|
|
def get_block(self, block_hash=None, height=None):
    """Fetch a block by hash, defaulting to the tip of the local header chain.

    An explicit *block_hash* wins; otherwise *height* (default: local tip)
    is translated to a hash via the ledger's headers.
    """
    headers = self.ledger.headers
    if height is None:
        height = headers.height
    if block_hash is None:
        block_hash = headers.hash(height).decode()
    return self.ledger.network.get_block(block_hash)
|
|
|
|
|
|
|
|
def get_claim_by_claim_id(self, claim_id):
    """Look up a claim by its claim id via the default ledger."""
    return self.ledger.get_claim_by_claim_id(claim_id)
|
|
|
|
|
|
|
|
def get_claim_by_outpoint(self, txid, nout):
    """Look up a claim by its (txid, nout) outpoint via the default ledger."""
    return self.ledger.get_claim_by_outpoint(txid, nout)
|
|
|
|
|
2018-07-12 18:14:47 +02:00
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class ClientRequest:
    """Container for an outgoing peer request.

    Holds the payload to send and, optionally, the key expected back in the
    peer's response.
    """

    def __init__(self, request_dict, response_identifier=None):
        self.request_dict = request_dict
        self.response_identifier = response_identifier
|
|
|
|
|
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class LBRYcrdAddressRequester:
    """Asks peers for their LBRYcrd payment address, once per protocol.

    Fix vs. original: the membership test used the un-idiomatic
    ``not protocol in ...``; it is now ``protocol not in ...`` (same behavior).
    """

    def __init__(self, wallet):
        self.wallet = wallet
        # Protocols we have already queried, to avoid duplicate requests.
        self._protocols = []

    def send_next_request(self, peer, protocol):
        """Request the peer's address over *protocol*.

        Returns a Deferred firing True if a request was sent, or False if
        this protocol was already queried.
        """
        if protocol not in self._protocols:
            r = ClientRequest({'lbrycrd_address': True}, 'lbrycrd_address')
            d = protocol.add_request(r)
            d.addCallback(self._handle_address_response, peer, r, protocol)
            d.addErrback(self._request_failed, peer)
            self._protocols.append(protocol)
            return defer.succeed(True)
        else:
            return defer.succeed(False)

    def _handle_address_response(self, response_dict, peer, request, protocol):
        """Record the address from the peer's reply; ValueError if it is missing."""
        if request.response_identifier not in response_dict:
            raise ValueError(
                "Expected {} in response but did not get it".format(request.response_identifier))
        assert protocol in self._protocols, "Responding protocol is not in our list of protocols"
        address = response_dict[request.response_identifier]
        self.wallet.update_peer_address(peer, address)

    def _request_failed(self, error, peer):
        """Errback: surface a failed address exchange as an exception."""
        raise Exception(
            "A peer failed to send a valid public key response. Error: {}, peer: {}".format(
                error.getErrorMessage(), str(peer)
            )
        )
|
2018-05-26 05:26:07 +02:00
|
|
|
|
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class LBRYcrdAddressQueryHandlerFactory:
    """Factory creating query handlers that answer 'lbrycrd_address' queries."""

    def __init__(self, wallet):
        self.wallet = wallet

    def build_query_handler(self):
        """Return a new LBRYcrdAddressQueryHandler bound to this wallet."""
        return LBRYcrdAddressQueryHandler(self.wallet)

    def get_primary_query_identifier(self):
        """The query key this factory's handlers respond to."""
        return 'lbrycrd_address'

    def get_description(self):
        """Human-readable description of the query this factory serves."""
        return "LBRYcrd Address - an address for receiving payments via LBRYcrd"
|
|
|
|
|
|
|
|
|
2018-07-22 00:34:59 +02:00
|
|
|
class LBRYcrdAddressQueryHandler:
    """Answers a peer's 'lbrycrd_address' query with one of our addresses."""

    def __init__(self, wallet):
        self.wallet = wallet
        self.query_identifiers = ['lbrycrd_address']
        # Filled in once we have answered a query / registered with a handler.
        self.address = None
        self.peer = None

    def register_with_request_handler(self, request_handler, peer):
        """Bind this handler to *peer* and register our query identifiers."""
        self.peer = peer
        request_handler.register_query_handler(self, self.query_identifiers)

    def handle_queries(self, queries):
        """Respond to an address query; error if none was made and none answered yet.

        Returns a Deferred firing {'lbrycrd_address': address} for a real
        query, or an empty dict when an address was already provided.
        """

        def _record_address(address):
            self.address = address
            return {'lbrycrd_address': address}

        if self.query_identifiers[0] in queries:
            d = self.wallet.get_unused_address_for_peer(self.peer)
            d.addCallback(_record_address)
            return d
        if self.address is None:
            raise Exception("Expected a request for an address, but did not receive one")
        else:
            return defer.succeed({})
|