2019-05-18 22:44:33 +02:00
|
|
|
import hashlib
|
2018-08-09 02:41:29 +02:00
|
|
|
import json
|
2018-07-12 18:14:47 +02:00
|
|
|
import logging
|
2019-03-11 14:52:35 +01:00
|
|
|
from hashlib import sha256
|
2019-03-19 00:34:01 +01:00
|
|
|
from string import hexdigits
|
2018-07-12 18:14:47 +02:00
|
|
|
|
2019-05-18 22:44:33 +02:00
|
|
|
import ecdsa
|
|
|
|
|
2019-05-04 23:22:32 +02:00
|
|
|
from torba.client.baseaccount import BaseAccount, HierarchicalDeterministic
|
2018-07-12 05:18:59 +02:00
|
|
|
|
2018-06-26 23:27:24 +02:00
|
|
|
|
2018-07-12 18:14:47 +02:00
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
|
2018-06-26 23:27:24 +02:00
|
|
|
|
2019-03-19 00:34:01 +01:00
|
|
|
def validate_claim_id(claim_id):
    """Validate that *claim_id* is a 40-character hex-encoded id.

    Accepts either ``str`` or ``bytes`` (bytes are decoded as UTF-8 before
    the hex check). Returns ``None`` on success.

    Raises:
        ValueError: when the length is not 40 or the content is not hex.
            (``ValueError`` subclasses ``Exception``, so existing callers
            catching ``Exception`` are unaffected.)
    """
    # Length is checked first so a wrong-sized id gets the clearer message.
    if len(claim_id) != 40:
        raise ValueError("Incorrect claimid length: %i" % len(claim_id))
    if isinstance(claim_id, bytes):
        claim_id = claim_id.decode('utf-8')
    # Any character outside 0-9a-fA-F means the id is not hex encoded.
    if set(claim_id).difference(hexdigits):
        raise ValueError("Claim id is not hex encoded")
|
|
|
|
|
|
|
|
|
2018-06-26 23:27:24 +02:00
|
|
|
class Account(BaseAccount):
    """LBRY account: a ``BaseAccount`` extended with channel signing keys.

    ``channel_keys`` maps a channel public-key hash to the channel's
    private key (PEM text).
    """

    def __init__(self, *args, **kwargs):
        """Initialize the base account and an empty channel-key store."""
        super().__init__(*args, **kwargs)
        # pubkey-hash -> private-key PEM; populated via apply()/from_dict().
        self.channel_keys = {}
|
2018-07-05 04:16:02 +02:00
|
|
|
|
2019-03-11 14:52:35 +01:00
|
|
|
@property
def hash(self) -> bytes:
    """SHA-256 digest over the account dict (without channel keys) plus
    every channel-key identifier in sorted order.

    Sorting makes the digest independent of dict insertion order.
    """
    digest = sha256(json.dumps(self.to_dict(False)).encode())
    for key_id in sorted(self.channel_keys):
        digest.update(key_id.encode())
    return digest.digest()
|
|
|
|
|
2019-03-12 20:31:54 +01:00
|
|
|
def apply(self, d: dict):
    """Merge account data from *d*, including any channel certificates."""
    super().apply(d)
    certificates = d.get('certificates')
    # update() with an empty/missing mapping is a no-op, so only merge
    # when there is actually something to merge.
    if certificates:
        self.channel_keys.update(certificates)
|
2019-03-12 20:31:54 +01:00
|
|
|
|
2019-05-20 09:29:48 +02:00
|
|
|
def add_channel_private_key(self, channel_name, channel_pubkey_hash, private_key):
    """Record *private_key* under *channel_pubkey_hash* unless one exists.

    An existing mapping is never overwritten; a duplicate add is logged
    (using *channel_name* for readability) and skipped.
    """
    if channel_pubkey_hash in self.channel_keys:
        log.info("Public-Private key mapping for the channel %s already exists. Skipping...", channel_name)
        return
    self.channel_keys[channel_pubkey_hash] = private_key
|
2018-07-05 04:16:02 +02:00
|
|
|
|
2019-05-18 22:44:33 +02:00
|
|
|
def get_channel_private_key(self, channel_pubkey_hash):
    """Return the private key stored for *channel_pubkey_hash*, or None."""
    keys = self.channel_keys
    return keys.get(channel_pubkey_hash, None)
|
2018-07-09 15:55:07 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def maybe_migrate_certificates(self):
    """Migrate legacy channel-key entries keyed by outpoint to pubkey-hash keys.

    Legacy entries use a ``txid:nout`` outpoint as the key; current entries
    use the public-key hash derived from the stored private key. Rebuilds
    ``self.channel_keys``, saves the wallet, and logs a summary of counters.
    """
    # Nothing stored -> nothing to migrate or verify.
    if not self.channel_keys:
        return

    # Migration/verification counters, logged as JSON at the end.
    results = {
        'total': 0,
        'consolidated': 0,
        'migrate-success': 0,
        'migrate-failed': 0,
        'previous-success': 0,
        'previous-corrupted': 0
    }

    # Replacement mapping: pubkey_hash -> private key PEM.
    new_channel_keys = {}

    for maybe_outpoint in self.channel_keys:
        results['total'] += 1
        # A ':' in the key marks the legacy txid:nout outpoint format.
        if ':' in maybe_outpoint:
            try:
                private_key_pem = self.channel_keys[maybe_outpoint]
                # Derive the canonical key (pubkey hash) from the PEM.
                pubkey_hash = self._get_pubkey_address_from_private_key_pem(private_key_pem)

                # Only migrate if no entry for this pubkey hash exists yet,
                # either already-migrated or pre-existing; otherwise the
                # duplicate is counted as consolidated and dropped.
                if pubkey_hash not in new_channel_keys and pubkey_hash not in self.channel_keys:
                    new_channel_keys[pubkey_hash] = private_key_pem
                    results['migrate-success'] += 1
                else:
                    results['consolidated'] += 1
            except Exception as e:
                # Unparseable/invalid PEM: count and keep going.
                results['migrate-failed'] += 1
                log.warning("Failed to migrate certificate for %s, incorrect private key: %s",
                            maybe_outpoint, str(e))
        else:
            # Already keyed by pubkey hash: verify the key matches its PEM.
            try:
                pubkey_hash = self._get_pubkey_address_from_private_key_pem(self.channel_keys[maybe_outpoint])
                if pubkey_hash == maybe_outpoint:
                    results['previous-success'] += 1
                else:
                    results['previous-corrupted'] += 1
            except Exception as e:
                log.warning("Corrupt public:private key-pair: %s", str(e))
                results['previous-corrupted'] += 1

    # NOTE(review): entries already keyed by pubkey hash ('previous-success')
    # are never copied into new_channel_keys, so this reassignment drops
    # them along with corrupted/failed entries — confirm this is intended.
    self.channel_keys = new_channel_keys

    self.wallet.save()
    log.info('verifying and possibly migrating certificates:')
    log.info(json.dumps(results, indent=2))
|
2018-07-12 05:57:22 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def save_max_gap(self):
    """Measure the observed address gaps and persist widened gap limits.

    Only applies to hierarchical-deterministic address generators; the
    receiving gap never drops below 20 and the change gap below 6.
    """
    if not issubclass(self.address_generator, HierarchicalDeterministic):
        return
    gaps = await self.get_max_gap()
    self.receiving.gap = max(20, gaps['max_receiving_gap'] + 1)
    self.change.gap = max(6, gaps['max_change_gap'] + 1)
    self.wallet.save()
|
2018-10-10 03:39:29 +02:00
|
|
|
|
2018-10-08 20:09:37 +02:00
|
|
|
def get_balance(self, confirmations=0, include_claims=False, **constraints):
    """Return the account balance, excluding claim/update/support outputs
    unless *include_claims* is true."""
    if not include_claims:
        # Restrict to plain spendable outputs.
        constraints['is_claim'] = 0
        constraints['is_update'] = 0
        constraints['is_support'] = 0
    return super().get_balance(confirmations, **constraints)
|
2018-07-17 05:32:37 +02:00
|
|
|
|
2018-08-06 06:28:11 +02:00
|
|
|
@classmethod
def get_private_key_from_seed(cls, ledger, seed: str, password: str):
    """Derive the account private key from *seed*, defaulting the seed
    password to 'lbryum' when none is given (lbryum wallet compatibility)."""
    effective_password = password or 'lbryum'
    return super().get_private_key_from_seed(ledger, seed, effective_password)
|
|
|
|
|
2018-07-12 07:47:34 +02:00
|
|
|
@classmethod
def from_dict(cls, ledger, wallet, d: dict) -> 'Account':
    """Build an account from its dict form, restoring channel keys from
    the 'certificates' entry (empty dict when absent)."""
    restored = super().from_dict(ledger, wallet, d)
    restored.channel_keys = d.get('certificates', {})
    return restored
|
|
|
|
|
2019-03-24 21:55:04 +01:00
|
|
|
def to_dict(self, include_channel_keys=True):
    """Serialize the account; channel keys go under 'certificates' unless
    *include_channel_keys* is false (used e.g. for hashing)."""
    serialized = super().to_dict()
    if include_channel_keys:
        serialized['certificates'] = self.channel_keys
    return serialized
|
2018-08-01 04:59:51 +02:00
|
|
|
|
2018-10-15 23:16:43 +02:00
|
|
|
async def get_details(self, **kwargs):
    """Return base account details extended with the channel-key count."""
    info = await super().get_details(**kwargs)
    info['certificates'] = len(self.channel_keys)
    return info
|
|
|
|
|
2018-10-10 02:46:41 +02:00
|
|
|
@staticmethod
def constraint_spending_utxos(constraints):
    """Mutate *constraints* in place to exclude claim, update and support
    outputs, leaving only plainly spendable UTXOs."""
    for flag in ('is_claim', 'is_update', 'is_support'):
        constraints[flag] = 0
|
|
|
|
|
|
|
|
def get_utxos(self, **constraints):
    """Return spendable UTXOs (claims/updates/supports filtered out)."""
    self.constraint_spending_utxos(constraints)
    result = super().get_utxos(**constraints)
    return result
|
|
|
|
|
|
|
|
def get_utxo_count(self, **constraints):
    """Return the number of spendable UTXOs (claim-type outputs excluded)."""
    self.constraint_spending_utxos(constraints)
    count = super().get_utxo_count(**constraints)
    return count
|
|
|
|
|
|
|
|
def get_claims(self, **constraints):
    """Delegate to the ledger db: claims belonging to this account."""
    db = self.ledger.db
    return db.get_claims(account=self, **constraints)
|
|
|
|
|
|
|
|
def get_claim_count(self, **constraints):
    """Delegate to the ledger db: number of claims for this account."""
    db = self.ledger.db
    return db.get_claim_count(account=self, **constraints)
|
|
|
|
|
2019-03-25 23:30:43 +01:00
|
|
|
def get_streams(self, **constraints):
    """Delegate to the ledger db: stream claims for this account."""
    db = self.ledger.db
    return db.get_streams(account=self, **constraints)
|
|
|
|
|
|
|
|
def get_stream_count(self, **constraints):
    """Delegate to the ledger db: number of stream claims for this account."""
    db = self.ledger.db
    return db.get_stream_count(account=self, **constraints)
|
|
|
|
|
2018-10-10 02:46:41 +02:00
|
|
|
def get_channels(self, **constraints):
    """Delegate to the ledger db: channel claims for this account."""
    db = self.ledger.db
    return db.get_channels(account=self, **constraints)
|
|
|
|
|
|
|
|
def get_channel_count(self, **constraints):
    """Delegate to the ledger db: number of channel claims for this account."""
    db = self.ledger.db
    return db.get_channel_count(account=self, **constraints)
|
2018-11-21 00:21:53 +01:00
|
|
|
|
2019-03-24 21:55:04 +01:00
|
|
|
def get_supports(self, **constraints):
    """Delegate to the ledger db: support outputs for this account."""
    db = self.ledger.db
    return db.get_supports(account=self, **constraints)
|
|
|
|
|
|
|
|
def get_support_count(self, **constraints):
    """Delegate to the ledger db: number of support outputs for this account."""
    db = self.ledger.db
    return db.get_support_count(account=self, **constraints)
|
|
|
|
|
2019-01-04 08:49:29 +01:00
|
|
|
async def release_all_outputs(self):
    """Release every output reservation this account holds in the db."""
    db = self.ledger.db
    await db.release_all_outputs(self)
|
2019-05-18 22:44:33 +02:00
|
|
|
|
|
|
|
def _get_pubkey_address_from_private_key_pem(self, private_key_pem):
    """Derive the ledger address for the public key matching a PEM-encoded
    ECDSA private key (hash function fixed to SHA-256)."""
    signing_key = ecdsa.SigningKey.from_pem(private_key_pem, hashfunc=hashlib.sha256)
    der_public_key = signing_key.get_verifying_key().to_der()
    return self.ledger.public_key_to_address(der_public_key)
|