2018-06-14 06:53:38 +02:00
|
|
|
import struct
|
2019-03-20 06:46:23 +01:00
|
|
|
import hashlib
|
2018-08-04 18:10:41 +02:00
|
|
|
from binascii import hexlify, unhexlify
|
2019-03-26 03:11:11 +01:00
|
|
|
from typing import List, Optional
|
2018-06-14 06:53:38 +02:00
|
|
|
|
2019-03-19 00:34:01 +01:00
|
|
|
import ecdsa
|
|
|
|
from cryptography.hazmat.backends import default_backend
|
|
|
|
from cryptography.hazmat.primitives.serialization import load_der_public_key
|
|
|
|
from cryptography.hazmat.primitives import hashes
|
|
|
|
from cryptography.hazmat.primitives.asymmetric import ec
|
|
|
|
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
|
|
|
|
from ecdsa.util import sigencode_der
|
|
|
|
|
2019-03-24 21:55:04 +01:00
|
|
|
from torba.client.basetransaction import BaseTransaction, BaseInput, BaseOutput, ReadOnlyList
|
2019-03-19 00:34:01 +01:00
|
|
|
from torba.client.hash import hash160, sha256, Base58
|
2019-03-18 05:59:13 +01:00
|
|
|
from lbrynet.schema.claim import Claim
|
2019-03-18 23:15:02 +01:00
|
|
|
from lbrynet.wallet.account import Account
|
|
|
|
from lbrynet.wallet.script import InputScript, OutputScript
|
2018-06-14 06:53:38 +02:00
|
|
|
|
|
|
|
|
|
|
|
class Input(BaseInput):
    """Transaction input whose script is parsed as an LBRY ``InputScript``."""

    script: InputScript
    script_class = InputScript
|
|
|
|
|
|
|
|
|
|
|
|
class Output(BaseOutput):
    """Transaction output aware of LBRY claim semantics.

    Extends the base output with helpers for deriving claim ids, lazily
    deserializing the claim payload, and signing/verifying channel
    signatures over claims.
    """

    script: OutputScript
    script_class = OutputScript

    # channel: the channel output this claim is (to be) signed by, if known.
    # private_key: PEM-encoded signing key for a channel claim, if known.
    __slots__ = 'channel', 'private_key'

    def __init__(self, *args, channel: Optional['Output'] = None,
                 private_key: Optional[str] = None, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.channel = channel
        self.private_key = private_key

    def update_annotations(self, annotated):
        """Copy channel/private_key annotations from ``annotated`` (or clear them)."""
        super().update_annotations(annotated)
        self.channel = annotated.channel if annotated else None
        self.private_key = annotated.private_key if annotated else None

    def get_fee(self, ledger):
        """Return this output's fee; claim-name outputs pay at least a
        per-character fee on the claim name."""
        name_fee = 0
        if self.script.is_claim_name:
            name_fee = len(self.script.values['claim_name']) * ledger.fee_per_name_char
        return max(name_fee, super().get_fee(ledger))

    @property
    def is_claim(self) -> bool:
        """True for claim-name and claim-update outputs (the ones carrying a claim payload)."""
        return self.script.is_claim_name or self.script.is_update_claim

    @property
    def claim_hash(self) -> bytes:
        """Binary claim id.

        For a new claim it is derived as ``hash160(tx hash + big-endian output
        position)``; updates and supports reference the id already stored in
        the script.

        Raises:
            ValueError: if this output is not claim-related.
        """
        if self.script.is_claim_name:
            return hash160(self.tx_ref.hash + struct.pack('>I', self.position))
        elif self.script.is_update_claim or self.script.is_support_claim:
            return self.script.values['claim_id']
        else:
            raise ValueError('No claim_id associated.')

    @property
    def claim_id(self) -> str:
        """Hex claim id: the byte-reversed ``claim_hash``."""
        return hexlify(self.claim_hash[::-1]).decode()

    @property
    def claim_name(self) -> str:
        """Claim name stored in the script.

        Raises:
            ValueError: if the script is not claim-related.
        """
        if self.script.is_claim_involved:
            return self.script.values['claim_name'].decode()
        raise ValueError('No claim_name associated.')

    @property
    def claim(self) -> Claim:
        """Deserialized claim payload; raw bytes are parsed and cached on
        first access.

        Raises:
            ValueError: for outputs other than claim name/update.
        """
        if self.is_claim:
            if not isinstance(self.script.values['claim'], Claim):
                self.script.values['claim'] = Claim.from_bytes(self.script.values['claim'])
            return self.script.values['claim']
        raise ValueError('Only claim name and claim update have the claim payload.')

    @property
    def permanent_url(self) -> str:
        """Canonical ``name#claim_id`` URL for this claim.

        Raises:
            ValueError: if this output is not claim-related.
        """
        if self.script.is_claim_involved:
            return f"{self.claim_name}#{self.claim_id}"
        raise ValueError('No claim associated.')

    @property
    def has_private_key(self):
        """True when a channel private key has been attached to this output."""
        return self.private_key is not None

    def is_signed_by(self, channel: 'Output', ledger=None):
        """Verify this claim's signature against ``channel``'s public key.

        Two digest layouts exist: the legacy format (``unsigned_payload``
        present) hashes address + payload + reversed channel hash; the
        current format hashes first-input id + channel hash + claim message
        bytes.

        Returns True on success. ``verify()`` raises
        ``cryptography.exceptions.InvalidSignature`` on mismatch rather than
        returning False.
        """
        if self.claim.unsigned_payload:
            # Legacy signature format.
            pieces = [
                Base58.decode(self.get_address(ledger)),
                self.claim.unsigned_payload,
                self.claim.signing_channel_hash[::-1]
            ]
        else:
            pieces = [
                self.tx_ref.tx.inputs[0].txo_ref.id.encode(),
                self.claim.signing_channel_hash,
                self.claim.to_message_bytes()
            ]
        digest = sha256(b''.join(pieces))
        public_key = load_der_public_key(channel.claim.channel.public_key_bytes, default_backend())
        # The stored signature is raw r||s; split it and re-encode as DER,
        # which is what cryptography's verify() expects.
        raw_signature = self.claim.signature
        half = len(raw_signature) // 2
        r = int.from_bytes(raw_signature[:half], 'big')
        s = int.from_bytes(raw_signature[half:], 'big')
        encoded_sig = sigencode_der(r, s, len(raw_signature) * 8)
        # The digest was computed above, so tell verify() it is pre-hashed.
        public_key.verify(encoded_sig, digest, ec.ECDSA(Prehashed(hashes.SHA256())))
        return True

    def sign(self, channel: 'Output', first_input_id=None):
        """Sign this claim with ``channel``'s private key and regenerate the script.

        ``first_input_id`` lets callers sign before the transaction's inputs
        are final (see the placeholder used by ``Transaction.claim_create``).
        """
        self.channel = channel
        self.claim.signing_channel_hash = channel.claim_hash
        digest = sha256(b''.join([
            first_input_id or self.tx_ref.tx.inputs[0].txo_ref.id.encode(),
            self.claim.signing_channel_hash,
            self.claim.to_message_bytes()
        ]))
        private_key = ecdsa.SigningKey.from_pem(channel.private_key, hashfunc=hashlib.sha256)
        # Deterministic signing: the same digest always yields the same signature.
        self.claim.signature = private_key.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)
        self.script.generate()

    def generate_channel_private_key(self):
        """Generate a SECP256k1 key pair for this channel claim.

        Stores the PEM private key on the output, embeds the DER public key
        in the claim, regenerates the script and returns the PEM private key.
        """
        private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
        self.private_key = private_key.to_pem().decode()
        self.claim.channel.public_key_bytes = private_key.get_verifying_key().to_der()
        self.script.generate()
        return self.private_key

    def is_channel_private_key(self, private_key_pem):
        """True if ``private_key_pem`` matches this channel claim's public key."""
        private_key = ecdsa.SigningKey.from_pem(private_key_pem, hashfunc=hashlib.sha256)
        return self.claim.channel.public_key_bytes == private_key.get_verifying_key().to_der()

    @classmethod
    def pay_claim_name_pubkey_hash(
            cls, amount: int, claim_name: str, claim: Claim, pubkey_hash: bytes) -> 'Output':
        """Build an output registering ``claim`` under ``claim_name``."""
        script = cls.script_class.pay_claim_name_pubkey_hash(
            claim_name.encode(), claim, pubkey_hash)
        return cls(amount, script)

    @classmethod
    def pay_update_claim_pubkey_hash(
            cls, amount: int, claim_name: str, claim_id: str, claim: Claim, pubkey_hash: bytes) -> 'Output':
        """Build an output updating the existing claim with hex id ``claim_id``."""
        script = cls.script_class.pay_update_claim_pubkey_hash(
            claim_name.encode(), unhexlify(claim_id)[::-1], claim, pubkey_hash)
        return cls(amount, script)

    @classmethod
    def pay_support_pubkey_hash(cls, amount: int, claim_name: str, claim_id: str, pubkey_hash: bytes) -> 'Output':
        """Build an output supporting an existing claim with ``amount``."""
        script = cls.script_class.pay_support_pubkey_hash(claim_name.encode(), unhexlify(claim_id)[::-1], pubkey_hash)
        return cls(amount, script)

    @classmethod
    def purchase_claim_pubkey_hash(cls, amount: int, claim_id: str, pubkey_hash: bytes) -> 'Output':
        """Build an output purchasing an existing claim."""
        script = cls.script_class.purchase_claim_pubkey_hash(unhexlify(claim_id)[::-1], pubkey_hash)
        return cls(amount, script)
|
|
|
|
|
2018-06-14 06:53:38 +02:00
|
|
|
|
|
|
|
class Transaction(BaseTransaction):
    """LBRY transaction: wires the claim-aware ``Input``/``Output`` classes
    into the base machinery and adds claim-specific constructors and
    output filters."""

    input_class = Input
    output_class = Output

    outputs: ReadOnlyList[Output]
    inputs: ReadOnlyList[Input]

    @classmethod
    def pay(cls, amount: int, address: bytes, funding_accounts: List[Account], change_account: Account):
        """Build a plain payment of ``amount`` to ``address``."""
        ledger = cls.ensure_all_have_same_ledger(funding_accounts, change_account)
        pay_to = Output.pay_pubkey_hash(amount, ledger.address_to_hash160(address))
        return cls.create([], [pay_to], funding_accounts, change_account)

    @classmethod
    def claim_create(
            cls, name: str, claim: Claim, amount: int, holding_address: str,
            funding_accounts: List[Account], change_account: Account, signing_channel: Output = None):
        """Build an (unsigned) transaction registering ``claim`` under ``name``."""
        ledger = cls.ensure_all_have_same_ledger(funding_accounts, change_account)
        txo = Output.pay_claim_name_pubkey_hash(
            amount, name, claim, ledger.address_to_hash160(holding_address)
        )
        if signing_channel is not None:
            # The real first-input id is unknown until the tx is funded, so
            # sign over a placeholder for now.
            txo.sign(signing_channel, b'placeholder txid:nout')
        return cls.create([], [txo], funding_accounts, change_account, sign=False)

    @classmethod
    def claim_update(
            cls, previous_claim: Output, amount: int, holding_address: str,
            funding_accounts: List[Account], change_account: Account, signing_channel: Output = None):
        """Build an (unsigned) transaction that spends and updates ``previous_claim``."""
        ledger = cls.ensure_all_have_same_ledger(funding_accounts, change_account)
        txo = Output.pay_update_claim_pubkey_hash(
            amount, previous_claim.claim_name, previous_claim.claim_id,
            previous_claim.claim, ledger.address_to_hash160(holding_address)
        )
        if signing_channel is not None:
            txo.sign(signing_channel, b'placeholder txid:nout')
        return cls.create(
            [Input.spend(previous_claim)], [txo], funding_accounts, change_account, sign=False
        )

    @classmethod
    def support(cls, claim_name: str, claim_id: str, amount: int, holding_address: str,
                funding_accounts: List[Account], change_account: Account):
        """Build a transaction supporting ``claim_id`` with ``amount``."""
        ledger = cls.ensure_all_have_same_ledger(funding_accounts, change_account)
        txo = Output.pay_support_pubkey_hash(
            amount, claim_name, claim_id, ledger.address_to_hash160(holding_address)
        )
        return cls.create([], [txo], funding_accounts, change_account)

    @classmethod
    def purchase(cls, claim: Output, amount: int, merchant_address: bytes,
                 funding_accounts: List[Account], change_account: Account):
        """Build a transaction purchasing ``claim`` paid to ``merchant_address``."""
        ledger = cls.ensure_all_have_same_ledger(funding_accounts, change_account)
        txo = Output.purchase_claim_pubkey_hash(
            amount, claim.claim_id, ledger.address_to_hash160(merchant_address)
        )
        return cls.create([], [txo], funding_accounts, change_account)

    @property
    def my_inputs(self):
        """Yield inputs whose spent output is resolved and belongs to our accounts."""
        for txi in self.inputs:
            spent = txi.txo_ref.txo
            if spent is not None and spent.is_my_account:
                yield txi

    def _filter_my_outputs(self, f):
        # Outputs owned by our accounts whose script satisfies predicate ``f``.
        for txo in self.outputs:
            if not txo.is_my_account:
                continue
            if f(txo.script):
                yield txo

    def _filter_other_outputs(self, f):
        # Outputs NOT owned by our accounts whose script satisfies predicate ``f``.
        for txo in self.outputs:
            if txo.is_my_account:
                continue
            if f(txo.script):
                yield txo

    @property
    def my_claim_outputs(self):
        """Our outputs that register a new claim."""
        return self._filter_my_outputs(lambda s: s.is_claim_name)

    @property
    def my_update_outputs(self):
        """Our outputs that update an existing claim."""
        return self._filter_my_outputs(lambda s: s.is_update_claim)

    @property
    def my_support_outputs(self):
        """Our outputs that support a claim."""
        return self._filter_my_outputs(lambda s: s.is_support_claim)

    @property
    def other_support_outputs(self):
        """Support outputs belonging to accounts other than ours."""
        return self._filter_other_outputs(lambda s: s.is_support_claim)

    @property
    def my_abandon_outputs(self):
        """Yield our claim-related spent outputs that are truly abandoned,
        i.e. not superseded by an update output in this transaction."""
        for txi in self.inputs:
            spent = txi.txo_ref.txo
            if spent is None or not spent.is_my_account:
                continue
            if not spent.script.is_claim_involved:
                continue
            superseded = False
            if spent.script.is_claim_name or spent.script.is_update_claim:
                superseded = any(
                    spent.claim_id == update.claim_id
                    for update in self.my_update_outputs
                )
            if not superseded:
                yield spent
|