2018-08-09 02:41:29 +02:00
|
|
|
import json
|
2018-07-12 18:14:47 +02:00
|
|
|
import logging
|
|
|
|
|
2018-07-12 05:18:59 +02:00
|
|
|
from twisted.internet import defer
|
|
|
|
|
|
|
|
from torba.baseaccount import BaseAccount
|
2018-08-03 18:31:50 +02:00
|
|
|
from torba.basetransaction import TXORef
|
2018-07-12 05:18:59 +02:00
|
|
|
|
2018-06-26 23:27:24 +02:00
|
|
|
from lbryschema.claim import ClaimDict
|
|
|
|
from lbryschema.signer import SECP256k1, get_signer
|
|
|
|
|
|
|
|
|
2018-07-12 18:14:47 +02:00
|
|
|
# Module-level logger named after this module (lazy %-style args used throughout).
log = logging.getLogger(__name__)
|
|
|
|
|
2018-06-26 23:27:24 +02:00
|
|
|
|
|
|
|
def generate_certificate():
    """Create a brand new SECP256k1 channel certificate.

    Returns:
        tuple: (certificate ``ClaimDict``, PEM-encoded private key bytes).
    """
    signer = get_signer(SECP256k1).generate()
    pem_key = signer.private_key.to_pem()
    certificate = ClaimDict.generate_certificate(pem_key, curve=SECP256k1)
    return certificate, pem_key
class Account(BaseAccount):
    """LBRY account: extends the torba base account with channel-certificate storage."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Maps a certificate look-up key (claim_id or "txid:nout") to its private key.
        self.certificates = {}
def add_certificate_private_key(self, ref: TXORef, private_key):
|
|
|
|
assert ref.id not in self.certificates, 'Trying to add a duplicate certificate.'
|
|
|
|
self.certificates[ref.id] = private_key
|
2018-07-05 04:16:02 +02:00
|
|
|
|
2018-08-03 18:31:50 +02:00
|
|
|
def get_certificate_private_key(self, ref: TXORef):
|
|
|
|
return self.certificates.get(ref.id)
|
2018-07-09 15:55:07 +02:00
|
|
|
|
2018-07-12 05:18:59 +02:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def maybe_migrate_certificates(self):
|
2018-08-09 02:41:29 +02:00
|
|
|
if not self.certificates:
|
|
|
|
return
|
|
|
|
|
|
|
|
addresses = {}
|
|
|
|
results = {
|
|
|
|
'total': 0,
|
|
|
|
'not-a-claim-tx': 0,
|
|
|
|
'migrate-success': 0,
|
|
|
|
'migrate-failed': 0,
|
|
|
|
'previous-success': 0,
|
|
|
|
'previous-corrupted': 0
|
|
|
|
}
|
|
|
|
|
2018-08-09 03:19:15 +02:00
|
|
|
for maybe_claim_id in list(self.certificates):
|
2018-08-09 02:41:29 +02:00
|
|
|
results['total'] += 1
|
2018-07-12 07:47:34 +02:00
|
|
|
if ':' not in maybe_claim_id:
|
|
|
|
claims = yield self.ledger.network.get_claims_by_ids(maybe_claim_id)
|
2018-08-27 06:46:42 +02:00
|
|
|
if maybe_claim_id not in claims:
|
|
|
|
log.warning(
|
|
|
|
"Failed to migrate claim '%s', server did not return any claim information.",
|
|
|
|
maybe_claim_id
|
|
|
|
)
|
|
|
|
results['migrate-failed'] += 1
|
|
|
|
continue
|
2018-07-12 18:14:47 +02:00
|
|
|
claim = claims[maybe_claim_id]
|
2018-08-17 16:35:56 +02:00
|
|
|
tx = None
|
|
|
|
if claim:
|
2018-10-09 01:33:55 +02:00
|
|
|
tx = yield self.ledger.db.get_transaction(txid=claim['txid'])
|
2018-08-17 16:35:56 +02:00
|
|
|
else:
|
|
|
|
log.warning(maybe_claim_id)
|
2018-07-12 18:14:47 +02:00
|
|
|
if tx is not None:
|
|
|
|
txo = tx.outputs[claim['nout']]
|
2018-08-09 02:41:29 +02:00
|
|
|
if not txo.script.is_claim_involved:
|
|
|
|
results['not-a-claim-tx'] += 1
|
|
|
|
raise ValueError(
|
|
|
|
"Certificate with claim_id {} doesn't point to a valid transaction."
|
|
|
|
.format(maybe_claim_id)
|
|
|
|
)
|
2018-07-12 18:14:47 +02:00
|
|
|
tx_nout = '{txid}:{nout}'.format(**claim)
|
|
|
|
self.certificates[tx_nout] = self.certificates[maybe_claim_id]
|
|
|
|
del self.certificates[maybe_claim_id]
|
|
|
|
log.info(
|
2018-07-15 07:20:44 +02:00
|
|
|
"Migrated certificate with claim_id '%s' ('%s') to a new look up key %s.",
|
|
|
|
maybe_claim_id, txo.script.values['claim_name'], tx_nout
|
2018-07-12 18:14:47 +02:00
|
|
|
)
|
2018-08-09 02:41:29 +02:00
|
|
|
results['migrate-success'] += 1
|
2018-07-12 18:14:47 +02:00
|
|
|
else:
|
2018-08-17 16:35:56 +02:00
|
|
|
if claim:
|
|
|
|
addresses.setdefault(claim['address'], 0)
|
|
|
|
addresses[claim['address']] += 1
|
|
|
|
log.warning(
|
|
|
|
"Failed to migrate claim '%s', it's not associated with any of your addresses.",
|
|
|
|
maybe_claim_id
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
log.warning(
|
|
|
|
"Failed to migrate claim '%s', it appears abandoned.",
|
|
|
|
maybe_claim_id
|
|
|
|
)
|
2018-08-09 02:41:29 +02:00
|
|
|
results['migrate-failed'] += 1
|
2018-08-04 03:39:48 +02:00
|
|
|
else:
|
|
|
|
try:
|
|
|
|
txid, nout = maybe_claim_id.split(':')
|
2018-10-09 01:33:55 +02:00
|
|
|
tx = yield self.ledger.db.get_transaction(txid=txid)
|
2018-08-04 03:39:48 +02:00
|
|
|
if tx.outputs[int(nout)].script.is_claim_involved:
|
2018-08-09 02:41:29 +02:00
|
|
|
results['previous-success'] += 1
|
2018-08-04 03:39:48 +02:00
|
|
|
else:
|
2018-08-09 02:41:29 +02:00
|
|
|
results['previous-corrupted'] += 1
|
2018-08-04 03:39:48 +02:00
|
|
|
except Exception:
|
|
|
|
log.exception("Couldn't verify certificate with look up key: %s", maybe_claim_id)
|
2018-08-09 02:41:29 +02:00
|
|
|
results['previous-corrupted'] += 1
|
|
|
|
|
|
|
|
self.wallet.save()
|
|
|
|
log.info('verifying and possibly migrating certificates:')
|
|
|
|
log.info(json.dumps(results, indent=2))
|
|
|
|
if addresses:
|
|
|
|
log.warning('failed for addresses:')
|
|
|
|
log.warning(json.dumps(
|
|
|
|
[{'address': a, 'number of certificates': c} for a, c in addresses.items()],
|
|
|
|
indent=2
|
|
|
|
))
|
2018-07-12 05:57:22 +02:00
|
|
|
|
2018-10-08 20:09:37 +02:00
|
|
|
def get_balance(self, confirmations=0, include_claims=False, **constraints):
|
2018-07-17 05:32:37 +02:00
|
|
|
if not include_claims:
|
|
|
|
constraints.update({'is_claim': 0, 'is_update': 0, 'is_support': 0})
|
2018-07-22 00:34:59 +02:00
|
|
|
return super().get_balance(confirmations, **constraints)
|
2018-07-17 05:32:37 +02:00
|
|
|
|
2018-08-06 06:28:11 +02:00
|
|
|
@classmethod
|
|
|
|
def get_private_key_from_seed(cls, ledger: 'baseledger.BaseLedger', seed: str, password: str):
|
|
|
|
return super().get_private_key_from_seed(
|
|
|
|
ledger, seed, password or 'lbryum'
|
|
|
|
)
|
|
|
|
|
2018-07-12 07:47:34 +02:00
|
|
|
@classmethod
|
2018-08-09 02:41:29 +02:00
|
|
|
def from_dict(cls, ledger, wallet, d: dict) -> 'Account':
|
|
|
|
account = super().from_dict(ledger, wallet, d)
|
2018-08-06 08:53:27 +02:00
|
|
|
account.certificates = d.get('certificates', {})
|
2018-07-12 07:47:34 +02:00
|
|
|
return account
|
|
|
|
|
|
|
|
def to_dict(self):
|
2018-07-22 00:34:59 +02:00
|
|
|
d = super().to_dict()
|
2018-07-12 07:47:34 +02:00
|
|
|
d['certificates'] = self.certificates
|
|
|
|
return d
|
2018-08-01 04:59:51 +02:00
|
|
|
|
2018-08-30 06:04:25 +02:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_details(self, **kwargs):
|
|
|
|
details = yield super().get_details(**kwargs)
|
|
|
|
details['certificates'] = len(self.certificates)
|
|
|
|
return details
|
|
|
|
|
2018-09-18 23:18:02 +02:00
|
|
|
def get_claim(self, claim_id=None, txid=None, nout=None):
|
2018-10-03 18:00:21 +02:00
|
|
|
if claim_id is not None:
|
|
|
|
return self.ledger.db.get_claims(account=self, claim_id=claim_id)
|
|
|
|
elif txid is not None and nout is not None:
|
|
|
|
return self.ledger.db.get_claims(**{'account': self, 'txo.txid': txid, 'txo.position': nout})
|
2018-09-21 15:47:06 +02:00
|
|
|
|
2018-10-10 02:46:41 +02:00
|
|
|
@staticmethod
|
|
|
|
def constraint_utxos_sans_claims(constraints):
|
|
|
|
constraints.update({'is_claim': 0, 'is_update': 0, 'is_support': 0})
|
|
|
|
|
|
|
|
def get_utxos(self, **constraints):
|
|
|
|
self.constraint_utxos_sans_claims(constraints)
|
|
|
|
return super().get_utxos(**constraints)
|
|
|
|
|
|
|
|
def get_utxo_count(self, **constraints):
|
|
|
|
self.constraint_utxos_sans_claims(constraints)
|
|
|
|
return super().get_utxo_count(**constraints)
|
|
|
|
|
|
|
|
def get_claims(self, **constraints):
|
|
|
|
return self.ledger.db.get_claims(account=self, **constraints)
|
|
|
|
|
|
|
|
def get_claim_count(self, **constraints):
|
|
|
|
return self.ledger.db.get_claim_count(account=self, **constraints)
|
|
|
|
|
|
|
|
def get_channels(self, **constraints):
|
|
|
|
return self.ledger.db.get_channels(account=self, **constraints)
|
|
|
|
|
|
|
|
def get_channel_count(self, **constraints):
|
|
|
|
return self.ledger.db.get_channel_count(account=self, **constraints)
|