import logging

from twisted.internet import defer

from torba.baseaccount import BaseAccount
from torba.basetransaction import TXORef

from lbryschema.claim import ClaimDict
from lbryschema.signer import SECP256k1, get_signer

log = logging.getLogger(__name__)


def generate_certificate():
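    """Generate a new SECP256k1 key pair and return the certificate ClaimDict
    built from it together with the PEM-encoded private key."""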
    secp256k1_private_key = get_signer(SECP256k1).generate().private_key.to_pem()
    return ClaimDict.generate_certificate(secp256k1_private_key, curve=SECP256k1), secp256k1_private_key


class Account(BaseAccount):
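    """An account that also tracks the private keys of the channel certificates
    it controls, keyed by the certificate output's 'txid:nout' reference."""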

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.certificates = {}

    def add_certificate_private_key(self, ref: TXORef, private_key):
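        """Store the private key for the certificate at the given output reference."""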
        assert ref.id not in self.certificates, 'Trying to add a duplicate certificate.'
        self.certificates[ref.id] = private_key

    def get_certificate_private_key(self, ref: TXORef):
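        """Return the stored private key for the given certificate output, if any."""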
        return self.certificates.get(ref.id)

    @defer.inlineCallbacks
    def maybe_migrate_certificates(self):
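        """Re-key any certificates still stored under a bare claim_id to the
        newer 'txid:nout' look up key, and verify entries already migrated."""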
        failed, succeeded, done, total = 0, 0, 0, 0
        for maybe_claim_id in list(self.certificates.keys()):
            total += 1
            if ':' not in maybe_claim_id:
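                # Old-style entry keyed by a bare claim_id: resolve the claim to
                # find its outpoint and re-key the certificate as 'txid:nout'.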
                claims = yield self.ledger.network.get_claims_by_ids(maybe_claim_id)
                claim = claims[maybe_claim_id]
                # txhash = unhexlify(claim['txid'])[::-1]
                tx = yield self.ledger.get_transaction(claim['txid'])
                if tx is not None:
                    txo = tx.outputs[claim['nout']]
                    assert txo.script.is_claim_involved, \
                        "Certificate with claim_id {} doesn't point to a valid transaction." \
                        .format(maybe_claim_id)
                    tx_nout = '{txid}:{nout}'.format(**claim)
                    self.certificates[tx_nout] = self.certificates[maybe_claim_id]
                    del self.certificates[maybe_claim_id]
                    log.info(
                        "Migrated certificate with claim_id '%s' ('%s') to a new look up key %s.",
                        maybe_claim_id, txo.script.values['claim_name'], tx_nout
                    )
                    succeeded += 1
                else:
                    log.warning(
                        "Failed to migrate claim '%s', it's not associated with any of your addresses.",
                        maybe_claim_id
                    )
                    failed += 1
            else:
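                # Already keyed as 'txid:nout': just verify that the referenced
                # output still looks like a claim.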
                try:
                    txid, nout = maybe_claim_id.split(':')
                    tx = yield self.ledger.get_transaction(txid)
                    if tx.outputs[int(nout)].script.is_claim_involved:
                        done += 1
                    else:
                        failed += 1
                except Exception:
                    log.exception("Couldn't verify certificate with look up key: %s", maybe_claim_id)
                    failed += 1

        log.info('Checked: %s, Done: %s, Converted: %s, Failed: %s', total, done, succeeded, failed)

    def get_balance(self, confirmations=6, include_claims=False, **constraints):
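        """Return the account balance at the given number of confirmations;
        claim, update and support outputs are excluded unless include_claims is True."""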
        if not include_claims:
            constraints.update({'is_claim': 0, 'is_update': 0, 'is_support': 0})
        return super().get_balance(confirmations, **constraints)

    def get_unspent_outputs(self, include_claims=False, **constraints):
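        """Return unspent outputs, excluding claim, update and support outputs
        unless include_claims is True."""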
        if not include_claims:
            constraints.update({'is_claim': 0, 'is_update': 0, 'is_support': 0})
        return super().get_unspent_outputs(**constraints)

    @defer.inlineCallbacks
    def get_channels(self):
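        """List this account's channel (certificate) claims as dictionaries with
        name, claim_id, txid, nout and whether the certificate private key is held."""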
        utxos = yield super().get_unspent_outputs(
            claim_type__any={'is_claim': 1, 'is_update': 1},
            claim_name__like='@%'
        )
        channels = []
        for utxo in utxos:
            d = ClaimDict.deserialize(utxo.script.values['claim'])
            channels.append({
                'name': utxo.claim_name,
                'claim_id': utxo.claim_id,
                'txid': utxo.tx_ref.id,
                'nout': utxo.position,
                'have_certificate': utxo.ref.id in self.certificates
            })
        defer.returnValue(channels)

    @classmethod
    def from_dict(cls, ledger, d: dict) -> 'Account':
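        """Recreate an account from a dict produced by to_dict(), restoring its certificates."""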
        account = super().from_dict(ledger, d)
        account.certificates = d['certificates']
        return account

    def to_dict(self):
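        """Serialize the account to a dict, including its certificate private keys."""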
        d = super().to_dict()
        d['certificates'] = self.certificates
        return d

    def get_claim(self, claim_id):
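        """Fetch one of this account's claims from the wallet database by claim_id."""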
        return self.ledger.db.get_claim(self, claim_id)