2018-05-26 05:26:07 +02:00
|
|
|
import os
|
2018-07-21 20:12:29 +02:00
|
|
|
import json
|
2018-08-06 06:28:11 +02:00
|
|
|
import logging
|
2019-05-22 12:33:57 +02:00
|
|
|
from binascii import unhexlify, hexlify
|
2018-10-05 01:50:19 +02:00
|
|
|
|
2018-09-26 04:40:52 +02:00
|
|
|
from datetime import datetime
|
|
|
|
|
2018-11-04 07:24:41 +01:00
|
|
|
from torba.client.basemanager import BaseWalletManager
|
2018-12-05 15:17:36 +01:00
|
|
|
from torba.rpc.jsonrpc import CodeMessageError
|
2018-03-26 04:59:57 +02:00
|
|
|
|
2019-03-18 23:15:02 +01:00
|
|
|
from lbrynet.wallet.ledger import MainNetLedger
|
2019-03-20 06:46:23 +01:00
|
|
|
from lbrynet.wallet.account import BaseAccount
|
2019-03-24 23:14:02 +01:00
|
|
|
from lbrynet.wallet.transaction import Transaction
|
2019-03-18 23:15:02 +01:00
|
|
|
from lbrynet.wallet.database import WalletDatabase
|
|
|
|
from lbrynet.wallet.dewies import dewies_to_lbc
|
2018-06-14 21:18:36 +02:00
|
|
|
|
2018-09-25 15:41:31 +02:00
|
|
|
|
2018-08-06 06:28:11 +02:00
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
|
2019-05-22 12:33:57 +02:00
|
|
|
# Single-byte-pair suffix markers appended to an exported certificate blob
# (see export/import_certificate_info) recording whether the private key
# was encrypted with a password during export.
_NO_PASSWORD_USED_BYTES = b'00'
_PASSWORD_USED_BYTES = b'01'
|
|
|
|
|
2018-03-26 04:59:57 +02:00
|
|
|
|
2018-05-26 05:26:07 +02:00
|
|
|
class LbryWalletManager(BaseWalletManager):
|
|
|
|
|
2018-07-12 18:14:47 +02:00
|
|
|
@property
def ledger(self) -> MainNetLedger:
    """Ledger backing the default account."""
    account = self.default_account
    return account.ledger
|
|
|
|
|
|
|
|
@property
def db(self) -> WalletDatabase:
    """Database attached to the default account's ledger."""
    ledger = self.ledger
    return ledger.db
|
|
|
|
|
2018-04-23 04:23:42 +02:00
|
|
|
@property
def use_encryption(self):
    """Whether the default account is saved to disk in encrypted form."""
    account = self.default_account
    return account.serialize_encrypted
|
2018-03-26 04:59:57 +02:00
|
|
|
|
2018-08-22 15:41:59 +02:00
|
|
|
@property
def is_wallet_unlocked(self):
    """True when the default account is not currently encrypted in memory."""
    locked = self.default_account.encrypted
    return not locked
|
2018-08-22 15:41:59 +02:00
|
|
|
|
2018-05-26 05:26:07 +02:00
|
|
|
def check_locked(self):
    """Return True when the default account is encrypted (i.e. locked)."""
    account = self.default_account
    return account.encrypted
|
2018-09-25 15:41:31 +02:00
|
|
|
|
|
|
|
def decrypt_account(self, account):
    """Persist *account* unencrypted from now on.

    The account must already be unlocked; this only clears the
    serialization flag and saves all wallets to disk. Returns True
    when the account ends up fully decrypted.
    """
    assert account.password is not None, "account is not unlocked"
    assert not account.encrypted, "account is not unlocked"
    account.serialize_encrypted = False
    self.save()
    fully_decrypted = not (account.encrypted or account.serialize_encrypted)
    return fully_decrypted
|
|
|
|
|
|
|
|
def encrypt_account(self, password, account):
    """Encrypt *account* with *password*, save it, then unlock it again.

    Returns True when the account is now flagged for encrypted
    serialization.
    """
    assert not account.encrypted, "account is already encrypted"
    account.encrypt(password)
    account.serialize_encrypted = True
    self.save()
    # re-unlock immediately so the account stays usable in memory
    self.unlock_account(password, account)
    return account.serialize_encrypted
|
2018-09-25 15:41:31 +02:00
|
|
|
|
|
|
|
def unlock_account(self, password, account):
    """Decrypt *account* in memory using *password*; True on success."""
    assert account.encrypted, "account is not locked"
    account.decrypt(password)
    unlocked = not account.encrypted
    return unlocked
|
|
|
|
|
|
|
|
def lock_account(self, account):
    """Re-encrypt *account* in memory with its cached password.

    Only valid for an account that is configured for encrypted
    serialization and is currently unlocked. Returns True on success.
    """
    assert account.password is not None, "account is already locked"
    assert not account.encrypted and account.serialize_encrypted, "account is not encrypted"
    account.encrypt(account.password)
    return account.encrypted
|
2018-05-26 05:26:07 +02:00
|
|
|
|
2018-08-17 16:35:56 +02:00
|
|
|
@staticmethod
def migrate_lbryum_to_torba(path):
    """Convert an old lbryum wallet file at *path* into torba format, in place.

    Returns (receiving_addresses, change_addresses) as sets of raw
    (binary) public keys recovered from the old wallet, or (None, None)
    when the file is absent or already migrated. The original file is
    kept next to the new wallet as an ``old_lbryum_wallet_<i>`` backup.
    """
    if not os.path.exists(path):
        return None, None
    with open(path, 'r') as f:
        unmigrated_json = f.read()
    unmigrated = json.loads(unmigrated_json)
    # TODO: After several public releases of new torba based wallet, we can delete
    # this lbryum->torba conversion code and require that users who still
    # have old structured wallets install one of the earlier releases that
    # still has the below conversion code.
    if 'master_public_keys' not in unmigrated:
        return None, None
    # fix: default to an empty dict so len(total) below cannot raise
    # TypeError when the old wallet has no 'addr_history' entry
    total = unmigrated.get('addr_history', {})
    receiving_addresses, change_addresses = set(), set()
    for unmigrated_account in unmigrated.get('accounts', {}).values():
        receiving_addresses.update(map(unhexlify, unmigrated_account.get('receiving', [])))
        change_addresses.update(map(unhexlify, unmigrated_account.get('change', [])))
    log.info("Wallet migrator found %s receiving addresses and %s change addresses. %s in total on history.",
             len(receiving_addresses), len(change_addresses), len(total))

    migrated_json = json.dumps({
        'version': 1,
        'name': 'My Wallet',
        'accounts': [{
            'version': 1,
            'name': 'Main Account',
            'ledger': 'lbc_mainnet',
            'encrypted': unmigrated['use_encryption'],
            'seed': unmigrated['seed'],
            'seed_version': unmigrated['seed_version'],
            'private_key': unmigrated['master_private_keys']['x/'],
            'public_key': unmigrated['master_public_keys']['x/'],
            'certificates': unmigrated.get('claim_certificates', {}),
            'address_generator': {
                'name': 'deterministic-chain',
                'receiving': {'gap': 20, 'maximum_uses_per_address': 1},
                'change': {'gap': 6, 'maximum_uses_per_address': 1}
            }
        }]
    }, indent=4, sort_keys=True)

    # preserve the original file's permissions on the migrated wallet
    mode = os.stat(path).st_mode
    # pick the first free backup slot: old_lbryum_wallet_1, _2, ...
    i = 1
    backup_path_template = os.path.join(os.path.dirname(path), "old_lbryum_wallet") + "_%i"
    while os.path.isfile(backup_path_template % i):
        i += 1
    os.rename(path, backup_path_template % i)
    # write to a temp file and atomically rename into place so a crash
    # cannot leave a half-written wallet behind
    temp_path = "{}.tmp.{}".format(path, os.getpid())
    with open(temp_path, "w") as f:
        f.write(migrated_json)
        f.flush()
        os.fsync(f.fileno())
    os.rename(temp_path, path)
    os.chmod(path, mode)
    return receiving_addresses, change_addresses
|
2018-08-17 16:35:56 +02:00
|
|
|
|
2018-05-26 05:26:07 +02:00
|
|
|
@classmethod
async def from_lbrynet_config(cls, settings):
    """Build a wallet manager from lbrynet daemon *settings*.

    Maps the configured blockchain name onto a torba ledger id, migrates
    any old lbryum wallet found on disk, creates a default account when
    the wallet is empty, and imports migrated addresses into the ledger
    database before returning the manager.
    """

    # translate lbrynet blockchain names into torba ledger identifiers;
    # an unknown blockchain_name raises KeyError here
    ledger_id = {
        'lbrycrd_main': 'lbc_mainnet',
        'lbrycrd_testnet': 'lbc_testnet',
        'lbrycrd_regtest': 'lbc_regtest'
    }[settings.blockchain_name]

    ledger_config = {
        'auto_connect': True,
        'default_servers': settings.lbryum_servers,
        'data_path': settings.wallet_dir,
    }

    wallets_directory = os.path.join(settings.wallet_dir, 'wallets')
    if not os.path.exists(wallets_directory):
        os.mkdir(wallets_directory)

    wallet_file_path = os.path.join(wallets_directory, 'default_wallet')

    # returns (None, None) when there is no old lbryum wallet to migrate
    receiving_addresses, change_addresses = cls.migrate_lbryum_to_torba(wallet_file_path)

    manager = cls.from_config({
        'ledgers': {ledger_id: ledger_config},
        'wallets': [wallet_file_path]
    })
    ledger = manager.get_or_create_ledger(ledger_id)
    if manager.default_account is None:
        log.info('Wallet at %s is empty, generating a default account.', wallet_file_path)
        manager.default_wallet.generate_account(ledger)
        manager.default_wallet.save()
    if receiving_addresses or change_addresses:
        if not os.path.exists(ledger.path):
            os.mkdir(ledger.path)
        # the db must be open to write migrated addresses; always close it
        # again so normal startup can reopen it cleanly
        await ledger.db.open()
        try:
            await manager._migrate_addresses(receiving_addresses, change_addresses)
        finally:
            await ledger.db.close()
    return manager
|
2018-10-05 01:50:19 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def _migrate_addresses(self, receiving_addresses: set, change_addresses: set):
    """Regenerate deterministic keys covering migrated lbryum addresses.

    Generates as many keys on the new account's receiving/change chains
    as there were old addresses, then logs whether every old address was
    reproduced by the new deterministic chains.
    """
    async with self.default_account.receiving.address_generator_lock:
        migrated_receiving = set(await self.default_account.receiving._generate_keys(0, len(receiving_addresses)))
    async with self.default_account.change.address_generator_lock:
        migrated_change = set(await self.default_account.change._generate_keys(0, len(change_addresses)))
    # the old wallet stored raw public keys; convert to addresses to compare
    receiving_addresses = set(map(self.default_account.ledger.public_key_to_address, receiving_addresses))
    change_addresses = set(map(self.default_account.ledger.public_key_to_address, change_addresses))
    # compute each difference once instead of re-deriving sets for logging
    missing_change = change_addresses - migrated_change
    if not missing_change:
        log.info("Successfully migrated %s change addresses.", len(change_addresses))
    else:
        log.warning("Failed to migrate %s change addresses!", len(missing_change))
    missing_receiving = receiving_addresses - migrated_receiving
    if not missing_receiving:
        log.info("Successfully migrated %s receiving addresses.", len(receiving_addresses))
    else:
        log.warning("Failed to migrate %s receiving addresses!", len(missing_receiving))
|
2018-05-26 05:26:07 +02:00
|
|
|
|
|
|
|
def get_best_blockhash(self):
    """Hex hash of the current tip of the locally-synced header chain."""
    headers = self.ledger.headers
    return headers.hash(headers.height).decode()
|
2018-05-26 05:26:07 +02:00
|
|
|
|
|
|
|
def get_unused_address(self):
    """Get (or create) an unused receiving address on the default account."""
    receiving_chain = self.default_account.receiving
    return receiving_chain.get_or_create_usable_address()
|
2018-05-26 05:26:07 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def send_amount_to_address(self, amount: int, destination_address: bytes, account=None):
    """Pay *amount* (dewies) to *destination_address* and broadcast the tx.

    Funds come from *account* (default account when omitted); change is
    returned to the same account. Returns the broadcast transaction.
    """
    funding_account = account or self.default_account
    tx = await Transaction.pay(amount, destination_address, [funding_account], funding_account)
    await funding_account.ledger.broadcast(tx)
    return tx
|
|
|
|
|
2018-12-04 01:40:18 +01:00
|
|
|
async def get_transaction(self, txid):
    """Fetch a transaction by id, from the local db or from the network.

    Returns a transaction object on success. NOTE: on a network error
    this returns a dict {'success': False, 'code': ..., 'message': ...}
    instead of raising, so callers must check the return type.
    """
    tx = await self.db.get_transaction(txid=txid)
    if not tx:
        try:
            raw = await self.ledger.network.get_transaction(txid)
            height = await self.ledger.network.get_transaction_height(txid)
        except CodeMessageError as e:
            return {'success': False, 'code': e.code, 'message': e.message}
        tx = self.ledger.transaction_class(unhexlify(raw))
        # attaches verification state for confirmed transactions
        await self.ledger.maybe_verify_transaction(tx, height)
    return tx
|
2018-09-19 15:58:50 +02:00
|
|
|
|
2018-09-26 04:40:52 +02:00
|
|
|
@staticmethod
async def get_history(account: BaseAccount, **constraints):
    """Build the JSON-friendly transaction history for *account*.

    Each history item carries the tx id, timestamp/confirmations derived
    from the local header chain, the net value/fee from this account's
    perspective, and per-output claim/update/support/abandon details.
    *constraints* are passed through to account.get_transactions().
    """
    headers = account.ledger.headers
    txs = await account.get_transactions(**constraints)
    history = []
    for tx in txs:
        # unconfirmed txs (height <= 0) have no header, hence no timestamp
        ts = headers[tx.height]['timestamp'] if tx.height > 0 else None
        item = {
            'txid': tx.id,
            'timestamp': ts,
            'date': datetime.fromtimestamp(ts).isoformat(' ')[:-3] if tx.height > 0 else None,
            'confirmations': (headers.height+1) - tx.height if tx.height > 0 else 0,
            'claim_info': [],
            'update_info': [],
            'support_info': [],
            'abandon_info': []
        }
        is_my_inputs = all([txi.is_my_account for txi in tx.inputs])
        if is_my_inputs:
            # fees only matter if we are the ones paying them
            item['value'] = dewies_to_lbc(tx.net_account_balance+tx.fee)
            item['fee'] = dewies_to_lbc(-tx.fee)
        else:
            # someone else paid the fees
            item['value'] = dewies_to_lbc(tx.net_account_balance)
            item['fee'] = '0.0'
        for txo in tx.my_claim_outputs:
            item['claim_info'].append({
                'address': txo.get_address(account.ledger),
                'balance_delta': dewies_to_lbc(-txo.amount),
                'amount': dewies_to_lbc(txo.amount),
                'claim_id': txo.claim_id,
                'claim_name': txo.claim_name,
                'nout': txo.position
            })
        for txo in tx.my_update_outputs:
            if is_my_inputs:  # updating my own claim
                previous = None
                # find the spent txo this update replaced, to compute the
                # balance delta relative to the previous claim amount
                for txi in tx.inputs:
                    if txi.txo_ref.txo is not None:
                        other_txo = txi.txo_ref.txo
                        if (other_txo.is_claim or other_txo.script.is_support_claim) \
                                and other_txo.claim_id == txo.claim_id:
                            previous = other_txo
                            break
                if previous is not None:
                    item['update_info'].append({
                        'address': txo.get_address(account.ledger),
                        'balance_delta': dewies_to_lbc(previous.amount-txo.amount),
                        'amount': dewies_to_lbc(txo.amount),
                        'claim_id': txo.claim_id,
                        'claim_name': txo.claim_name,
                        'nout': txo.position
                    })
            else:  # someone sent us their claim
                item['update_info'].append({
                    'address': txo.get_address(account.ledger),
                    'balance_delta': dewies_to_lbc(0),
                    'amount': dewies_to_lbc(txo.amount),
                    'claim_id': txo.claim_id,
                    'claim_name': txo.claim_name,
                    'nout': txo.position
                })
        for txo in tx.my_support_outputs:
            item['support_info'].append({
                'address': txo.get_address(account.ledger),
                'balance_delta': dewies_to_lbc(txo.amount if not is_my_inputs else -txo.amount),
                'amount': dewies_to_lbc(txo.amount),
                'claim_id': txo.claim_id,
                'claim_name': txo.claim_name,
                # a support funded by someone else is a tip to us
                'is_tip': not is_my_inputs,
                'nout': txo.position
            })
        if is_my_inputs:
            # supports we paid for on claims that are not ours
            for txo in tx.other_support_outputs:
                item['support_info'].append({
                    'address': txo.get_address(account.ledger),
                    'balance_delta': dewies_to_lbc(-txo.amount),
                    'amount': dewies_to_lbc(txo.amount),
                    'claim_id': txo.claim_id,
                    'claim_name': txo.claim_name,
                    'is_tip': is_my_inputs,
                    'nout': txo.position
                })
        for txo in tx.my_abandon_outputs:
            item['abandon_info'].append({
                'address': txo.get_address(account.ledger),
                'balance_delta': dewies_to_lbc(txo.amount),
                'amount': dewies_to_lbc(txo.amount),
                'claim_id': txo.claim_id,
                'claim_name': txo.claim_name,
                'nout': txo.position
            })
        history.append(item)
    return history
|
2018-05-29 05:12:11 +02:00
|
|
|
|
2019-05-22 12:33:57 +02:00
|
|
|
async def export_certificate_info(self, claim_id, account, password=None, insecure=False):
    """Serialize the private key of channel *claim_id* for export.

    Exactly one of *password* / *insecure* must be chosen: a password
    encrypts the exported key, the --insecure flag exports it in the
    clear. Returns a hex string of the key material followed by a
    password-used marker byte pair.

    Raises ValueError for invalid password/insecure combinations and
    LookupError when the channel's private key cannot be retrieved.
    """
    if password is None and not insecure:
        raise ValueError(
            "Password not provided. If you wish to export channel without a password, please use the "
            "--insecure flag"
        )

    if password is not None and insecure:
        raise ValueError(
            "Password and insecure flag cannot be provided together. Please remove the insecure flag"
        )

    try:
        channel_txo = (await account.get_channels(claim_id=claim_id, limit=1))[0]
        private_key_str = channel_txo.pem_to_private_key_str(channel_txo.private_key)
    except Exception as e:
        # fix: chain the original failure so the real cause (missing
        # channel vs. bad key data) is preserved in the traceback
        raise LookupError(f"Cannot retrieve private key for channel id: {claim_id}") from e

    # NOTE(review): `if not password` treats an empty-string password as
    # "no password", while the import side checks `password is None` --
    # confirm an empty password is intentionally exported unencrypted
    if not password:
        serialized_certificate_info = private_key_str + _NO_PASSWORD_USED_BYTES
    else:
        encrypted_private_key = self.encrypt_private_key_with_password(private_key_str, password)
        serialized_certificate_info = encrypted_private_key + _PASSWORD_USED_BYTES

    return hexlify(serialized_certificate_info).decode()
|
|
|
|
|
|
|
|
async def import_certificate_info(self, serialized_certificate_info, password, account):
    """Import a channel private key produced by export_certificate_info.

    Validates *password* against the export's password-used marker,
    decrypts the key material if needed, and stores the PEM key on
    *account* keyed by the channel public key's address.
    """
    serialized_certificate_info = (unhexlify(serialized_certificate_info.encode()))

    # the trailing byte pair records whether a password was used at export
    if password is None and serialized_certificate_info.endswith(_PASSWORD_USED_BYTES):
        raise ValueError("The certificate was encrypted with a password but no password was provided.")

    if password is not None and serialized_certificate_info.endswith(_NO_PASSWORD_USED_BYTES):
        raise ValueError("The certificate was not encrypted with a password but a password was provided.")

    # strip the two marker bytes, leaving only the key material
    serialized_certificate_info = serialized_certificate_info[0:-2]

    if not password:
        private_key = Transaction.output_class.private_key_from_str(serialized_certificate_info)
    else:
        # NOTE(review): helper name is misspelled ("serilized"); presumably
        # defined elsewhere with this exact spelling -- confirm before renaming
        decrypted_private_key = self.decrypt_serilized_info_with_password(serialized_certificate_info, password)
        private_key = Transaction.output_class.private_key_from_str(decrypted_private_key)

    public_key_bytes = private_key.get_verifying_key().to_der()
    channel_pubkey_hash = account.ledger.public_key_to_address(public_key_bytes)
    account.channel_keys[channel_pubkey_hash] = private_key.to_pem().decode()
    account.wallet.save()
|
|
|
|
|
|
|
|
def encrypt_private_key_with_password(self, private_key_str, password):
    # NOTE(review): unimplemented stub -- it returns None, so exporting a
    # certificate WITH a password (see export_certificate_info) would fail
    # when concatenating the marker bytes. Needs an implementation that is
    # the inverse of the decryption helper used by import_certificate_info.
    pass
|
|
|
|
|
2018-07-12 18:18:58 +02:00
|
|
|
def save(self):
    """Persist every wallet managed by this instance to disk."""
    for managed_wallet in self.wallets:
        managed_wallet.save()
|