Compare commits

10 commits

Author | SHA1 | Date
---|---|---
 | d624ce7e58 |
 | ad00ddeab7 |
 | 035f51b293 |
 | ff8daf4aa7 |
 | 0c4d118863 |
 | d7fc46beec |
 | 6a49901ac4 |
 | 2452476d93 |
 | 070e35b9fc |
 | dd7ad16bf7 |
29 changed files with 187 additions and 564 deletions
@@ -1,2 +1,2 @@
__version__ = "0.107.0"
__version__ = "0.107.2"
version = tuple(map(int, __version__.split('.'))) # pylint: disable=invalid-name
@@ -8,6 +8,7 @@ import time
import inspect
import typing
import random
import hashlib
import tracemalloc
from decimal import Decimal
from urllib.parse import urlencode, quote

@@ -16,6 +17,7 @@ from binascii import hexlify, unhexlify
from traceback import format_exc
from functools import wraps, partial

import ecdsa
import base58
from aiohttp import web
from prometheus_client import generate_latest as prom_generate_latest, Gauge, Histogram, Counter

@@ -27,7 +29,6 @@ from lbry.wallet import (
)
from lbry.wallet.dewies import dewies_to_lbc, lbc_to_dewies, dict_values_to_lbc
from lbry.wallet.constants import TXO_TYPES, CLAIM_TYPE_NAMES
from lbry.wallet.bip32 import PrivateKey

from lbry import utils
from lbry.conf import Config, Setting, NOT_SET

@@ -2728,13 +2729,12 @@ class Daemon(metaclass=JSONRPCServerType):
name, claim, amount, claim_address, funding_accounts, funding_accounts[0]
)
txo = tx.outputs[0]
txo.set_channel_private_key(
await funding_accounts[0].generate_channel_private_key()
)
await txo.generate_channel_private_key()

await tx.sign(funding_accounts)

if not preview:
account.add_channel_private_key(txo.private_key)
wallet.save()
await self.broadcast_or_release(tx, blocking)
self.component_manager.loop.create_task(self.storage.save_claims([self._old_get_temp_claim_info(

@@ -2883,9 +2883,7 @@ class Daemon(metaclass=JSONRPCServerType):
new_txo = tx.outputs[0]

if new_signing_key:
new_txo.set_channel_private_key(
await funding_accounts[0].generate_channel_private_key()
)
await new_txo.generate_channel_private_key()
else:
new_txo.private_key = old_txo.private_key

@@ -2894,6 +2892,7 @@ class Daemon(metaclass=JSONRPCServerType):
await tx.sign(funding_accounts)

if not preview:
account.add_channel_private_key(new_txo.private_key)
wallet.save()
await self.broadcast_or_release(tx, blocking)
self.component_manager.loop.create_task(self.storage.save_claims([self._old_get_temp_claim_info(

@@ -3065,7 +3064,7 @@ class Daemon(metaclass=JSONRPCServerType):
'channel_id': channel.claim_id,
'holding_address': address,
'holding_public_key': public_key.extended_key_string(),
'signing_private_key': channel.private_key.signing_key.to_pem().decode()
'signing_private_key': channel.private_key.to_pem().decode()
}
return base58.b58encode(json.dumps(export, separators=(',', ':')))

@@ -3088,14 +3087,15 @@ class Daemon(metaclass=JSONRPCServerType):

decoded = base58.b58decode(channel_data)
data = json.loads(decoded)
channel_private_key = PrivateKey.from_pem(
self.ledger, data['signing_private_key']
channel_private_key = ecdsa.SigningKey.from_pem(
data['signing_private_key'], hashfunc=hashlib.sha256
)
public_key_der = channel_private_key.get_verifying_key().to_der()

# check that the holding_address hasn't changed since the export was made
holding_address = data['holding_address']
channels, _, _, _ = await self.ledger.claim_search(
wallet.accounts, public_key_id=channel_private_key.address
wallet.accounts, public_key_id=self.ledger.public_key_to_address(public_key_der)
)
if channels and channels[0].get_address(self.ledger) != holding_address:
holding_address = channels[0].get_address(self.ledger)
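The export/import hunks above serialize a channel's signing key as PEM inside a base58-encoded JSON blob and load it back with `ecdsa.SigningKey.from_pem`. A minimal standalone sketch of that round trip, with the export dict trimmed to one illustrative field (the daemon also stores the holding address and public key):

```python
import json
import hashlib

import base58
import ecdsa

# Throwaway channel signing key for illustration (the daemon already has one).
signing_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)

# Export: PEM private key wrapped in JSON, then base58-encoded.
export = {'signing_private_key': signing_key.to_pem().decode()}
blob = base58.b58encode(json.dumps(export, separators=(',', ':')))

# Import: decode the blob and rebuild the SigningKey from the PEM text.
data = json.loads(base58.b58decode(blob))
restored = ecdsa.SigningKey.from_pem(data['signing_private_key'], hashfunc=hashlib.sha256)
public_key_der = restored.get_verifying_key().to_der()

assert restored.to_string() == signing_key.to_string()
print(len(public_key_der), "byte DER public key")
```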
@@ -10,7 +10,7 @@ from lbry.schema.claim import Claim
from lbry.schema.support import Support
from lbry.torrent.torrent_manager import TorrentSource
from lbry.wallet import Wallet, Ledger, Account, Transaction, Output
from lbry.wallet.bip32 import PublicKey
from lbry.wallet.bip32 import PubKey
from lbry.wallet.dewies import dewies_to_lbc
from lbry.stream.managed_stream import ManagedStream

@@ -138,7 +138,7 @@ class JSONResponseEncoder(JSONEncoder):
return self.encode_claim(obj)
if isinstance(obj, Support):
return obj.to_dict()
if isinstance(obj, PublicKey):
if isinstance(obj, PubKey):
return obj.extended_key_string()
if isinstance(obj, datetime):
return obj.strftime("%Y%m%dT%H:%M:%S")
@@ -449,7 +449,7 @@ class SQLiteStorage(SQLiteMixin):
return await self.db.execute_fetchall(
"select blob.blob_hash, blob.blob_length, blob.added_on "
"from blob left join stream_blob using (blob_hash) "
"where stream_blob.stream_hash is null and blob.is_mine=? "
"where stream_blob.stream_hash is null and blob.is_mine=? and blob.status='finished'"
"order by blob.blob_length desc, blob.added_on asc",
(is_mine,)
)

@@ -463,7 +463,8 @@ class SQLiteStorage(SQLiteMixin):
content_blobs = await self.db.execute_fetchall(
"select blob.blob_hash, blob.blob_length, blob.added_on "
"from blob join stream_blob using (blob_hash) cross join stream using (stream_hash)"
"cross join file using (stream_hash) where blob.is_mine=? order by blob.added_on asc, blob.blob_length asc",
"cross join file using (stream_hash)"
"where blob.is_mine=? and blob.status='finished' order by blob.added_on asc, blob.blob_length asc",
(is_mine,)
)
return content_blobs + sd_blobs

@@ -480,7 +481,8 @@ class SQLiteStorage(SQLiteMixin):
coalesce(sum(case when
is_mine=1
then blob_length else 0 end), 0) as private_storage
from blob left join stream_blob using (blob_hash) where blob_hash not in (select sd_hash from stream)
from blob left join stream_blob using (blob_hash)
where blob_hash not in (select sd_hash from stream) and blob.status="finished"
""")
return {
'network_storage': network_size,
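These storage hunks add a `blob.status='finished'` condition so blobs that are still pending are excluded from disk-space accounting. A small sqlite3 sketch of the same filtering idea against a simplified, hypothetical `blob` table:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table blob (blob_hash text, blob_length integer, is_mine integer, status text)")
conn.executemany(
    "insert into blob values (?, ?, ?, ?)",
    [("aa", 100, 1, "finished"), ("bb", 200, 1, "pending"), ("cc", 300, 0, "finished")],
)

# Only finished blobs count toward the storage totals; pending ones are ignored.
total_mine = conn.execute(
    "select coalesce(sum(blob_length), 0) from blob where is_mine=1 and status='finished'"
).fetchone()[0]
print(total_mine)  # 100: the pending 200-byte blob is excluded
```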
@@ -2,9 +2,6 @@ import logging
from typing import List
from binascii import hexlify, unhexlify

from asn1crypto.keys import PublicKeyInfo
from coincurve import PublicKey as cPublicKey

from google.protobuf.json_format import MessageToDict
from google.protobuf.message import DecodeError
from hachoir.core.log import log as hachoir_log

@@ -349,7 +346,7 @@ class Channel(BaseClaim):

@property
def public_key(self) -> str:
return hexlify(self.public_key_bytes).decode()
return hexlify(self.message.public_key).decode()

@public_key.setter
def public_key(self, sd_public_key: str):

@@ -357,11 +354,7 @@ class Channel(BaseClaim):

@property
def public_key_bytes(self) -> bytes:
if len(self.message.public_key) == 33:
return self.message.public_key
public_key_info = PublicKeyInfo.load(self.message.public_key)
public_key = cPublicKey(public_key_info.native['public_key'])
return public_key.format(compressed=True)
return self.message.public_key

@public_key_bytes.setter
def public_key_bytes(self, public_key: bytes):
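The removed `public_key_bytes` property returned 33-byte keys untouched and converted DER-encoded `SubjectPublicKeyInfo` blobs to compressed form with asn1crypto and coincurve. A standalone sketch of that conversion; the sample DER key here is generated with `ecdsa` purely for illustration rather than read from a claim:

```python
import ecdsa
from asn1crypto.keys import PublicKeyInfo
from coincurve import PublicKey as cPublicKey

# Old channel claims stored the public key as DER-encoded SubjectPublicKeyInfo.
der_public_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1).get_verifying_key().to_der()

def to_compressed(public_key: bytes) -> bytes:
    """Return a 33-byte compressed secp256k1 key, accepting either form."""
    if len(public_key) == 33:                      # already compressed
        return public_key
    info = PublicKeyInfo.load(public_key)          # parse the DER structure
    point = info.native['public_key']              # raw (uncompressed) EC point
    return cPublicKey(point).format(compressed=True)

compressed = to_compressed(der_public_key)
assert len(compressed) == 33 and compressed[0] in (2, 3)
```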
@@ -17,10 +17,8 @@ from functools import partial
from lbry.wallet import WalletManager, Wallet, Ledger, Account, Transaction
from lbry.conf import Config
from lbry.wallet.util import satoshis_to_coins
from lbry.wallet.dewies import lbc_to_dewies
from lbry.wallet.orchstr8 import Conductor
from lbry.wallet.orchstr8.node import LBCWalletNode, WalletNode, HubNode
from lbry.schema.claim import Claim

from lbry.extras.daemon.daemon import Daemon, jsonrpc_dumps_pretty
from lbry.extras.daemon.components import Component, WalletComponent

@@ -559,19 +557,6 @@ class CommandTestCase(IntegrationTestCase):
return self.sout(tx)
return tx

async def create_nondeterministic_channel(self, name, price, pubkey_bytes, daemon=None, blocking=False):
account = (daemon or self.daemon).wallet_manager.default_account
claim_address = await account.receiving.get_or_create_usable_address()
claim = Claim()
claim.channel.public_key_bytes = pubkey_bytes
tx = await Transaction.claim_create(
name, claim, lbc_to_dewies(price),
claim_address, [self.account], self.account
)
await tx.sign([self.account])
await (daemon or self.daemon).broadcast_or_release(tx, blocking)
return self.sout(tx)

def create_upload_file(self, data, prefix=None, suffix=None):
file_path = tempfile.mktemp(prefix=prefix or "tmp", suffix=suffix or "", dir=self.daemon.conf.upload_dir)
with open(file_path, 'w+b') as file:
@@ -3,11 +3,11 @@ __lbcctl__ = 'lbcctl'
__lbcwallet__ = 'lbcwallet'
__lbcd_url__ = (
'https://github.com/lbryio/lbcd/releases/download/' +
'v0.22.200-beta/lbcd_0.22.200-beta_TARGET_PLATFORM.tar.gz'
'v0.22.100-rc.0/lbcd_0.22.100-rc.0_TARGET_PLATFORM.tar.gz'
)
__lbcwallet_url__ = (
'https://github.com/lbryio/lbcwallet/releases/download/' +
'v0.13.100-alpha-rc2/lbcwallet_0.13.100-alpha-rc2_TARGET_PLATFORM.tar.gz'
'v0.13.100-alpha.0/lbcwallet_0.13.100-alpha.0_TARGET_PLATFORM.tar.gz'
)
__spvserver__ = 'lbry.wallet.server.coin.LBCRegTest'

@@ -15,8 +15,7 @@ from lbry.wallet.wallet import Wallet, WalletStorage, TimestampedPreferences, EN
from lbry.wallet.manager import WalletManager
from lbry.wallet.network import Network
from lbry.wallet.ledger import Ledger, RegTestLedger, TestNetLedger, BlockHeightEvent
from lbry.wallet.account import Account, AddressManager, SingleKey, HierarchicalDeterministic, \
DeterministicChannelKeyManager
from lbry.wallet.account import Account, AddressManager, SingleKey, HierarchicalDeterministic
from lbry.wallet.transaction import Transaction, Output, Input
from lbry.wallet.script import OutputScript, InputScript
from lbry.wallet.database import SQLiteMixin, Database
@@ -9,10 +9,11 @@ from hashlib import sha256
from string import hexdigits
from typing import Type, Dict, Tuple, Optional, Any, List

import ecdsa
from lbry.error import InvalidPasswordError
from lbry.crypto.crypt import aes_encrypt, aes_decrypt

from .bip32 import PrivateKey, PublicKey, KeyPath, from_extended_key_string
from .bip32 import PrivateKey, PubKey, from_extended_key_string
from .mnemonic import Mnemonic
from .constants import COIN, TXO_TYPES
from .transaction import Transaction, Input, Output

@@ -33,49 +34,6 @@ def validate_claim_id(claim_id):
raise Exception("Claim id is not hex encoded")


class DeterministicChannelKeyManager:

def __init__(self, account: 'Account'):
self.account = account
self.last_known = 0
self.cache = {}
self._private_key: Optional[PrivateKey] = None

@property
def private_key(self):
if self._private_key is None:
if self.account.private_key is not None:
self._private_key = self.account.private_key.child(KeyPath.CHANNEL)
return self._private_key

def maybe_generate_deterministic_key_for_channel(self, txo):
if self.private_key is None:
return
next_private_key = self.private_key.child(self.last_known)
public_key = next_private_key.public_key
public_key_bytes = public_key.pubkey_bytes
if txo.claim.channel.public_key_bytes == public_key_bytes:
self.cache[public_key.address] = next_private_key
self.last_known += 1

async def ensure_cache_primed(self):
if self.private_key is not None:
await self.generate_next_key()

async def generate_next_key(self) -> PrivateKey:
db = self.account.ledger.db
while True:
next_private_key = self.private_key.child(self.last_known)
public_key = next_private_key.public_key
self.cache[public_key.address] = next_private_key
if not await db.is_channel_key_used(self.account, public_key):
return next_private_key
self.last_known += 1

def get_private_key_from_pubkey_hash(self, pubkey_hash) -> PrivateKey:
return self.cache.get(pubkey_hash)


class AddressManager:

name: str
@@ -121,7 +79,7 @@ class AddressManager:
def get_private_key(self, index: int) -> PrivateKey:
raise NotImplementedError

def get_public_key(self, index: int) -> PublicKey:
def get_public_key(self, index: int) -> PubKey:
raise NotImplementedError

async def get_max_gap(self):

@@ -161,8 +119,8 @@ class HierarchicalDeterministic(AddressManager):
@classmethod
def from_dict(cls, account: 'Account', d: dict) -> Tuple[AddressManager, AddressManager]:
return (
cls(account, KeyPath.RECEIVE, **d.get('receiving', {'gap': 20, 'maximum_uses_per_address': 1})),
cls(account, KeyPath.CHANGE, **d.get('change', {'gap': 6, 'maximum_uses_per_address': 1}))
cls(account, 0, **d.get('receiving', {'gap': 20, 'maximum_uses_per_address': 1})),
cls(account, 1, **d.get('change', {'gap': 6, 'maximum_uses_per_address': 1}))
)

def merge(self, d: dict):

@@ -175,7 +133,7 @@ class HierarchicalDeterministic(AddressManager):
def get_private_key(self, index: int) -> PrivateKey:
return self.account.private_key.child(self.chain_number).child(index)

def get_public_key(self, index: int) -> PublicKey:
def get_public_key(self, index: int) -> PubKey:
return self.account.public_key.child(self.chain_number).child(index)

async def get_max_gap(self) -> int:

@@ -235,7 +193,7 @@ class SingleKey(AddressManager):
@classmethod
def from_dict(cls, account: 'Account', d: dict) \
-> Tuple[AddressManager, AddressManager]:
same_address_manager = cls(account, account.public_key, KeyPath.RECEIVE)
same_address_manager = cls(account, account.public_key, 0)
return same_address_manager, same_address_manager

def to_dict_instance(self):

@@ -244,7 +202,7 @@ class SingleKey(AddressManager):
def get_private_key(self, index: int) -> PrivateKey:
return self.account.private_key

def get_public_key(self, index: int) -> PublicKey:
def get_public_key(self, index: int) -> PubKey:
return self.account.public_key

async def get_max_gap(self) -> int:

@@ -266,6 +224,9 @@ class SingleKey(AddressManager):

class Account:

mnemonic_class = Mnemonic
private_key_class = PrivateKey
public_key_class = PubKey
address_generators: Dict[str, Type[AddressManager]] = {
SingleKey.name: SingleKey,
HierarchicalDeterministic.name: HierarchicalDeterministic,

@@ -273,7 +234,7 @@ class Account:

def __init__(self, ledger: 'Ledger', wallet: 'Wallet', name: str,
seed: str, private_key_string: str, encrypted: bool,
private_key: Optional[PrivateKey], public_key: PublicKey,
private_key: Optional[PrivateKey], public_key: PubKey,
address_generator: dict, modified_on: float, channel_keys: dict) -> None:
self.ledger = ledger
self.wallet = wallet

@@ -284,14 +245,13 @@ class Account:
self.private_key_string = private_key_string
self.init_vectors: Dict[str, bytes] = {}
self.encrypted = encrypted
self.private_key: Optional[PrivateKey] = private_key
self.public_key: PublicKey = public_key
self.private_key = private_key
self.public_key = public_key
generator_name = address_generator.get('name', HierarchicalDeterministic.name)
self.address_generator = self.address_generators[generator_name]
self.receiving, self.change = self.address_generator.from_dict(self, address_generator)
self.address_managers = {am.chain_number: am for am in (self.receiving, self.change)}
self.channel_keys = channel_keys
self.deterministic_channel_keys = DeterministicChannelKeyManager(self)
ledger.add_account(self)
wallet.add_account(self)

@@ -306,19 +266,19 @@ class Account:
name: str = None, address_generator: dict = None):
return cls.from_dict(ledger, wallet, {
'name': name,
'seed': Mnemonic().make_seed(),
'seed': cls.mnemonic_class().make_seed(),
'address_generator': address_generator or {}
})

@classmethod
def get_private_key_from_seed(cls, ledger: 'Ledger', seed: str, password: str):
return PrivateKey.from_seed(
ledger, Mnemonic.mnemonic_to_seed(seed, password or 'lbryum')
return cls.private_key_class.from_seed(
ledger, cls.mnemonic_class.mnemonic_to_seed(seed, password or 'lbryum')
)

@classmethod
def keys_from_dict(cls, ledger: 'Ledger', d: dict) \
-> Tuple[str, Optional[PrivateKey], PublicKey]:
-> Tuple[str, Optional[PrivateKey], PubKey]:
seed = d.get('seed', '')
private_key_string = d.get('private_key', '')
private_key = None

@@ -489,7 +449,7 @@ class Account:
assert not self.encrypted, "Cannot get private key on encrypted wallet account."
return self.address_managers[chain].get_private_key(index)

def get_public_key(self, chain: int, index: int) -> PublicKey:
def get_public_key(self, chain: int, index: int) -> PubKey:
return self.address_managers[chain].get_public_key(index)

def get_balance(self, confirmations=0, include_claims=False, read_only=False, **constraints):
@@ -560,30 +520,33 @@ class Account:

return tx

async def generate_channel_private_key(self):
return await self.deterministic_channel_keys.generate_next_key()
def add_channel_private_key(self, private_key):
public_key_bytes = private_key.get_verifying_key().to_der()
channel_pubkey_hash = self.ledger.public_key_to_address(public_key_bytes)
self.channel_keys[channel_pubkey_hash] = private_key.to_pem().decode()

def add_channel_private_key(self, private_key: PrivateKey):
self.channel_keys[private_key.address] = private_key.to_pem().decode()

async def get_channel_private_key(self, public_key_bytes) -> PrivateKey:
async def get_channel_private_key(self, public_key_bytes):
channel_pubkey_hash = self.ledger.public_key_to_address(public_key_bytes)
private_key_pem = self.channel_keys.get(channel_pubkey_hash)
if private_key_pem:
return PrivateKey.from_pem(self.ledger, private_key_pem)
return self.deterministic_channel_keys.get_private_key_from_pubkey_hash(channel_pubkey_hash)
return await asyncio.get_event_loop().run_in_executor(
None, ecdsa.SigningKey.from_pem, private_key_pem, sha256
)

async def maybe_migrate_certificates(self):
def to_der(private_key_pem):
return ecdsa.SigningKey.from_pem(private_key_pem, hashfunc=sha256).get_verifying_key().to_der()

if not self.channel_keys:
return
channel_keys = {}
for private_key_pem in self.channel_keys.values():
if not isinstance(private_key_pem, str):
continue
if not private_key_pem.startswith("-----BEGIN"):
if "-----BEGIN EC PRIVATE KEY-----" not in private_key_pem:
continue
private_key = PrivateKey.from_pem(self.ledger, private_key_pem)
channel_keys[private_key.address] = private_key_pem
public_key_der = await asyncio.get_event_loop().run_in_executor(None, to_der, private_key_pem)
channel_keys[self.ledger.public_key_to_address(public_key_der)] = private_key_pem
if self.channel_keys != channel_keys:
self.channel_keys = channel_keys
self.wallet.save()
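The reworked `maybe_migrate_certificates` drops entries that are not EC PEM strings and re-keys the rest by the address derived from each key's DER-encoded public key. A rough standalone sketch of that filtering, using a deliberately simplified, hypothetical stand-in for `ledger.public_key_to_address` (the real function hash160s the key and base58-check encodes it with a network prefix):

```python
import hashlib

import ecdsa

def pubkey_der_to_address(public_key_der: bytes) -> str:
    # Hypothetical stand-in for ledger.public_key_to_address.
    return hashlib.sha256(public_key_der).hexdigest()[:40]

def migrate_certificates(channel_keys: dict) -> dict:
    migrated = {}
    for private_key_pem in channel_keys.values():
        if not isinstance(private_key_pem, str):
            continue  # null or binary entries are dropped
        if "-----BEGIN EC PRIVATE KEY-----" not in private_key_pem:
            continue  # junk strings are dropped
        signing_key = ecdsa.SigningKey.from_pem(private_key_pem, hashfunc=hashlib.sha256)
        public_key_der = signing_key.get_verifying_key().to_der()
        migrated[pubkey_der_to_address(public_key_der)] = private_key_pem
    return migrated

# Demo: one valid PEM entry keyed by a legacy id, plus junk entries.
demo_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
legacy = {'legacy-id': demo_key.to_pem().decode(), 'foo': 'bar', 'null-entry': None}
print(migrate_certificates(legacy))  # one entry, re-keyed by the derived address
```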
@@ -1,21 +1,10 @@
from asn1crypto.keys import PrivateKeyInfo, ECPrivateKey
from coincurve import PublicKey as cPublicKey, PrivateKey as cPrivateKey
from coincurve.utils import (
pem_to_der, lib as libsecp256k1, ffi as libsecp256k1_ffi
)
from coincurve.ecdsa import CDATA_SIG_LENGTH
from coincurve import PublicKey, PrivateKey as _PrivateKey

from lbry.crypto.hash import hmac_sha512, hash160, double_sha256
from lbry.crypto.base58 import Base58
from .util import cachedproperty


class KeyPath:
RECEIVE = 0
CHANGE = 1
CHANNEL = 2


class DerivationError(Exception):
""" Raised when an invalid derivation occurs. """

@@ -82,30 +71,26 @@ class _KeyBase:
return Base58.encode_check(self.extended_key())


class PublicKey(_KeyBase):
class PubKey(_KeyBase):
""" A BIP32 public key. """

def __init__(self, ledger, pubkey, chain_code, n, depth, parent=None):
super().__init__(ledger, chain_code, n, depth, parent)
if isinstance(pubkey, cPublicKey):
if isinstance(pubkey, PublicKey):
self.verifying_key = pubkey
else:
self.verifying_key = self._verifying_key_from_pubkey(pubkey)

@classmethod
def from_compressed(cls, public_key_bytes, ledger=None) -> 'PublicKey':
return cls(ledger, public_key_bytes, bytes((0,)*32), 0, 0)

@classmethod
def _verifying_key_from_pubkey(cls, pubkey):
""" Converts a 33-byte compressed pubkey into an coincurve.PublicKey object. """
""" Converts a 33-byte compressed pubkey into an PublicKey object. """
if not isinstance(pubkey, (bytes, bytearray)):
raise TypeError('pubkey must be raw bytes')
if len(pubkey) != 33:
raise ValueError('pubkey must be 33 bytes')
if pubkey[0] not in (2, 3):
raise ValueError('invalid pubkey prefix byte')
return cPublicKey(pubkey)
return PublicKey(pubkey)

@cachedproperty
def pubkey_bytes(self):

@@ -120,7 +105,7 @@ class PublicKey(_KeyBase):
def ec_point(self):
return self.verifying_key.point()

def child(self, n: int) -> 'PublicKey':
def child(self, n: int):
""" Return the derived child extended pubkey at index N. """
if not 0 <= n < (1 << 31):
raise ValueError('invalid BIP32 public key child number')

@@ -128,7 +113,7 @@ class PublicKey(_KeyBase):
msg = self.pubkey_bytes + n.to_bytes(4, 'big')
L_b, R_b = self._hmac_sha512(msg) # pylint: disable=invalid-name
derived_key = self.verifying_key.add(L_b)
return PublicKey(self.ledger, derived_key, R_b, n, self.depth + 1, self)
return PubKey(self.ledger, derived_key, R_b, n, self.depth + 1, self)

def identifier(self):
""" Return the key's identifier as 20 bytes. """
@@ -141,36 +126,6 @@ class PublicKey(_KeyBase):
self.pubkey_bytes
)

def verify(self, signature, digest) -> bool:
""" Verify that a signature is valid for a 32 byte digest. """

if len(signature) != 64:
raise ValueError('Signature must be 64 bytes long.')

if len(digest) != 32:
raise ValueError('Digest must be 32 bytes long.')

key = self.verifying_key

raw_signature = libsecp256k1_ffi.new('secp256k1_ecdsa_signature *')

parsed = libsecp256k1.secp256k1_ecdsa_signature_parse_compact(
key.context.ctx, raw_signature, signature
)
assert parsed == 1

normalized_signature = libsecp256k1_ffi.new('secp256k1_ecdsa_signature *')

libsecp256k1.secp256k1_ecdsa_signature_normalize(
key.context.ctx, normalized_signature, raw_signature
)

verified = libsecp256k1.secp256k1_ecdsa_verify(
key.context.ctx, normalized_signature, digest, key.public_key
)

return bool(verified)


class PrivateKey(_KeyBase):
"""A BIP32 private key."""

@@ -179,7 +134,7 @@ class PrivateKey(_KeyBase):

def __init__(self, ledger, privkey, chain_code, n, depth, parent=None):
super().__init__(ledger, chain_code, n, depth, parent)
if isinstance(privkey, cPrivateKey):
if isinstance(privkey, _PrivateKey):
self.signing_key = privkey
else:
self.signing_key = self._signing_key_from_privkey(privkey)

@@ -187,7 +142,7 @@ class PrivateKey(_KeyBase):
@classmethod
def _signing_key_from_privkey(cls, private_key):
""" Converts a 32-byte private key into an coincurve.PrivateKey object. """
return cPrivateKey.from_int(PrivateKey._private_key_secret_exponent(private_key))
return _PrivateKey.from_int(PrivateKey._private_key_secret_exponent(private_key))

@classmethod
def _private_key_secret_exponent(cls, private_key):

@@ -199,36 +154,24 @@ class PrivateKey(_KeyBase):
return int.from_bytes(private_key, 'big')

@classmethod
def from_seed(cls, ledger, seed) -> 'PrivateKey':
def from_seed(cls, ledger, seed):
# This hard-coded message string seems to be coin-independent...
hmac = hmac_sha512(b'Bitcoin seed', seed)
privkey, chain_code = hmac[:32], hmac[32:]
return cls(ledger, privkey, chain_code, 0, 0)

@classmethod
def from_pem(cls, ledger, pem) -> 'PrivateKey':
der = pem_to_der(pem.encode())
try:
key_int = ECPrivateKey.load(der).native['private_key']
except ValueError:
key_int = PrivateKeyInfo.load(der).native['private_key']['private_key']
private_key = cPrivateKey.from_int(key_int)
return cls(ledger, private_key, bytes((0,)*32), 0, 0)

@cachedproperty
def private_key_bytes(self):
""" Return the serialized private key (no leading zero byte). """
return self.signing_key.secret

@cachedproperty
def public_key(self) -> PublicKey:
def public_key(self):
""" Return the corresponding extended public key. """
verifying_key = self.signing_key.public_key
parent_pubkey = self.parent.public_key if self.parent else None
return PublicKey(
self.ledger, verifying_key, self.chain_code,
self.n, self.depth, parent_pubkey
)
return PubKey(self.ledger, verifying_key, self.chain_code, self.n, self.depth,
parent_pubkey)

def ec_point(self):
return self.public_key.ec_point()
@@ -241,12 +184,11 @@ class PrivateKey(_KeyBase):
""" Return the private key encoded in Wallet Import Format. """
return self.ledger.private_key_to_wif(self.private_key_bytes)

@property
def address(self):
""" The public key as a P2PKH address. """
return self.public_key.address

def child(self, n) -> 'PrivateKey':
def child(self, n):
""" Return the derived child extended private key at index N."""
if not 0 <= n < (1 << 32):
raise ValueError('invalid BIP32 private key child number')

@@ -265,28 +207,6 @@ class PrivateKey(_KeyBase):
""" Produce a signature for piece of data by double hashing it and signing the hash. """
return self.signing_key.sign(data, hasher=double_sha256)

def sign_compact(self, digest):
""" Produce a compact signature. """
key = self.signing_key

signature = libsecp256k1_ffi.new('secp256k1_ecdsa_signature *')
signed = libsecp256k1.secp256k1_ecdsa_sign(
key.context.ctx, signature, digest, key.secret,
libsecp256k1_ffi.NULL, libsecp256k1_ffi.NULL
)

if not signed:
raise ValueError('The private key was invalid.')

serialized = libsecp256k1_ffi.new('unsigned char[%d]' % CDATA_SIG_LENGTH)
compacted = libsecp256k1.secp256k1_ecdsa_signature_serialize_compact(
key.context.ctx, serialized, signature
)
if compacted != 1:
raise ValueError('The signature could not be compacted.')

return bytes(libsecp256k1_ffi.buffer(serialized, CDATA_SIG_LENGTH))

def identifier(self):
"""Return the key's identifier as 20 bytes."""
return self.public_key.identifier()

@@ -298,12 +218,9 @@ class PrivateKey(_KeyBase):
b'\0' + self.private_key_bytes
)

def to_pem(self):
return self.signing_key.to_pem()


def _from_extended_key(ledger, ekey):
"""Return a PublicKey or PrivateKey from an extended key raw bytes."""
"""Return a PubKey or PrivateKey from an extended key raw bytes."""
if not isinstance(ekey, (bytes, bytearray)):
raise TypeError('extended key must be raw bytes')
if len(ekey) != 78:

@@ -315,7 +232,7 @@ def _from_extended_key(ledger, ekey):

if ekey[:4] == ledger.extended_public_key_prefix:
pubkey = ekey[45:]
key = PublicKey(ledger, pubkey, chain_code, n, depth)
key = PubKey(ledger, pubkey, chain_code, n, depth)
elif ekey[:4] == ledger.extended_private_key_prefix:
if ekey[45] != 0:
raise ValueError('invalid extended private key prefix byte')

@@ -333,6 +250,6 @@ def from_extended_key_string(ledger, ekey_str):
xpub6BsnM1W2Y7qLMiuhi7f7dbAwQZ5Cz5gYJCRzTNainXzQXYjFwtuQXHd
3qfi3t3KJtHxshXezfjft93w4UE7BGMtKwhqEHae3ZA7d823DVrL

return a PublicKey or PrivateKey.
return a PubKey or PrivateKey.
"""
return _from_extended_key(ledger, Base58.decode_check(ekey_str))
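Both sides of this change keep the same BIP32 mechanics in `from_seed`: the master key is the HMAC-SHA512 of the seed keyed with b'Bitcoin seed', the left 32 bytes becoming the private key and the right 32 bytes the chain code. A tiny sketch of just that step, independent of the wallet classes:

```python
import hmac
import hashlib

def master_key_from_seed(seed: bytes) -> tuple:
    """Return (private_key_bytes, chain_code) per BIP32."""
    digest = hmac.new(b'Bitcoin seed', seed, hashlib.sha512).digest()
    return digest[:32], digest[32:]

privkey, chain_code = master_key_from_seed(bytes(64))  # demo seed: 64 zero bytes
assert len(privkey) == 32 and len(chain_code) == 32
```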
@@ -9,11 +9,10 @@ from dataclasses import dataclass
from contextvars import ContextVar
from typing import Tuple, List, Union, Callable, Any, Awaitable, Iterable, Dict, Optional
from datetime import date

from prometheus_client import Gauge, Counter, Histogram
from lbry.utils import LockWithMetrics

from .bip32 import PublicKey
from .bip32 import PubKey
from .transaction import Transaction, Output, OutputScript, TXRefImmutable, Input
from .constants import TXO_TYPES, CLAIM_TYPES
from .util import date_to_julian_day

@@ -976,9 +975,7 @@ class Database(SQLiteMixin):
sql.append("LEFT JOIN txi ON (txi.position=0 AND txi.txid=txo.txid)")
return await self.db.execute_fetchall(*query(' '.join(sql), **constraints), read_only=read_only)

async def get_txos(
self, wallet=None, no_tx=False, no_channel_info=False, read_only=False, **constraints
) -> List[Output]:
async def get_txos(self, wallet=None, no_tx=False, no_channel_info=False, read_only=False, **constraints):
include_is_spent = constraints.get('include_is_spent', False)
include_is_my_input = constraints.get('include_is_my_input', False)
include_is_my_output = constraints.pop('include_is_my_output', False)

@@ -1204,7 +1201,7 @@ class Database(SQLiteMixin):
addresses = await self.select_addresses(', '.join(cols), read_only=read_only, **constraints)
if 'pubkey' in cols:
for address in addresses:
address['pubkey'] = PublicKey(
address['pubkey'] = PubKey(
self.ledger, address.pop('pubkey'), address.pop('chain_code'),
address.pop('n'), address.pop('depth')
)

@@ -1244,18 +1241,6 @@ class Database(SQLiteMixin):
async def set_address_history(self, address, history):
await self._set_address_history(address, history)

async def is_channel_key_used(self, account, key: PublicKey):
channels = await self.get_txos(
accounts=[account], txo_type=TXO_TYPES['channel'],
no_tx=True, no_channel_info=True
)
other_key_bytes = key.pubkey_bytes
for channel in channels:
claim = channel.can_decode_claim
if claim and claim.channel.public_key_bytes == other_key_bytes:
return True
return False

@staticmethod
def constrain_purchases(constraints):
accounts = constraints.pop('accounts', None)
@@ -26,7 +26,7 @@ from lbry.wallet.transaction import Transaction, Output
from lbry.wallet.header import Headers, UnvalidatedHeaders
from lbry.wallet.checkpoints import HASHES
from lbry.wallet.constants import TXO_TYPES, CLAIM_TYPES, COIN, NULL_HASH32
from lbry.wallet.bip32 import PublicKey, PrivateKey
from lbry.wallet.bip32 import PubKey, PrivateKey
from lbry.wallet.coinselection import CoinSelector

log = logging.getLogger(__name__)

@@ -226,7 +226,7 @@ class Ledger(metaclass=LedgerRegistry):
return account.get_private_key(address_info['chain'], address_info['pubkey'].n)
return None

async def get_public_key_for_address(self, wallet, address) -> Optional[PublicKey]:
async def get_public_key_for_address(self, wallet, address) -> Optional[PubKey]:
match = await self._get_account_and_address_info_for_address(wallet, address)
if match:
_, address_info = match

@@ -474,7 +474,6 @@ class Ledger(metaclass=LedgerRegistry):
for address_manager in account.address_managers.values():
await self.subscribe_addresses(address_manager, await address_manager.get_addresses())
await account.ensure_address_gap()
await account.deterministic_channel_keys.ensure_cache_primed()

async def unsubscribe_account(self, account: Account):
for address in await account.get_addresses():

@@ -555,7 +554,6 @@ class Ledger(metaclass=LedgerRegistry):
)
remote_history_txids = {txid for txid, _ in remote_history}
async for tx in self.request_synced_transactions(to_request, remote_history_txids, address):
self.maybe_has_channel_key(tx)
pending_synced_history[tx_indexes[tx.id]] = f"{tx.id}:{tx.height}:"
if len(pending_synced_history) % 100 == 0:
log.info("Syncing address %s: %d/%d", address, len(pending_synced_history), len(to_request))

@@ -623,12 +621,6 @@ class Ledger(metaclass=LedgerRegistry):
tx.is_verified = merkle_root == header['merkle_root']
return tx

def maybe_has_channel_key(self, tx):
for txo in tx._outputs:
if txo.can_decode_claim and txo.claim.is_channel:
for account in self.accounts:
account.deterministic_channel_keys.maybe_generate_deterministic_key_for_channel(txo)

async def request_transactions(self, to_request: Tuple[Tuple[str, int], ...], cached=False):
batches = [[]]
remote_heights = {}
@@ -1,5 +1,5 @@
__hub_url__ = (
"https://github.com/lbryio/hub/releases/download/v0.2022.01.21.1/hub"
"https://github.com/lbryio/herald/releases/download/v0.2022.01.21.1/hub"
)
from lbry.wallet.orchstr8.node import Conductor
from lbry.wallet.orchstr8.service import ConductorService
@@ -1,9 +1,19 @@
import struct
import hashlib
import logging
import typing
import asyncio
from binascii import hexlify, unhexlify
from typing import List, Iterable, Optional, Tuple

import ecdsa
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_der_public_key
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
from cryptography.exceptions import InvalidSignature

from lbry.error import InsufficientFundsError
from lbry.crypto.hash import hash160, sha256
from lbry.crypto.base58 import Base58

@@ -18,7 +28,6 @@ from .constants import COIN, DUST, NULL_HASH32
from .bcd_data_stream import BCDataStream
from .hash import TXRef, TXRefImmutable
from .util import ReadOnlyList
from .bip32 import PrivateKey, PublicKey

if typing.TYPE_CHECKING:
from lbry.wallet.account import Account

@@ -213,8 +222,7 @@ class Output(InputOutput):
is_my_output: Optional[bool] = None, is_my_input: Optional[bool] = None,
sent_supports: Optional[int] = None, sent_tips: Optional[int] = None,
received_tips: Optional[int] = None,
channel: Optional['Output'] = None,
private_key: Optional[PrivateKey] = None
channel: Optional['Output'] = None, private_key: Optional[str] = None
) -> None:
super().__init__(tx_ref, position)
self.amount = amount

@@ -227,7 +235,7 @@ class Output(InputOutput):
self.sent_tips = sent_tips
self.received_tips = received_tips
self.channel = channel
self.private_key: PrivateKey = private_key
self.private_key = private_key
self.purchase: 'Output' = None # txo containing purchase metadata
self.purchased_claim: 'Output' = None # resolved claim pointed to by purchase
self.purchase_receipt: 'Output' = None # txo representing purchase receipt for this claim
@@ -417,15 +425,25 @@ class Output(InputOutput):
]
return sha256(b''.join(pieces))

def get_encoded_signature(self):
signature = hexlify(self.signable.signature)
r = int(signature[:int(len(signature)/2)], 16)
s = int(signature[int(len(signature)/2):], 16)
return ecdsa.util.sigencode_der(r, s, len(signature)*4)

@staticmethod
def is_signature_valid(signature, digest, public_key_bytes):
return PublicKey\
.from_compressed(public_key_bytes)\
.verify(signature, digest)
def is_signature_valid(encoded_signature, signature_digest, public_key_bytes):
try:
public_key = load_der_public_key(public_key_bytes, default_backend())
public_key.verify(encoded_signature, signature_digest, ec.ECDSA(Prehashed(hashes.SHA256())))
return True
except (ValueError, InvalidSignature):
pass
return False

def is_signed_by(self, channel: 'Output', ledger=None):
return self.is_signature_valid(
self.signable.signature,
self.get_encoded_signature(),
self.get_signature_digest(ledger),
channel.claim.channel.public_key_bytes
)

@@ -438,27 +456,29 @@ class Output(InputOutput):
self.signable.signing_channel_hash,
self.signable.to_message_bytes()
]))
self.signable.signature = channel.private_key.sign_compact(digest)
self.signable.signature = channel.private_key.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)
self.script.generate()

def sign_data(self, data: bytes, timestamp: str) -> str:
def sign_data(self, data:bytes, timestamp:str) -> str:
pieces = [timestamp.encode(), self.claim_hash, data]
digest = sha256(b''.join(pieces))
signature = self.private_key.sign_compact(digest)
signature = self.private_key.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)
return hexlify(signature).decode()

def clear_signature(self):
self.channel = None
self.signable.clear_signature()

def set_channel_private_key(self, private_key: PrivateKey):
self.private_key = private_key
self.claim.channel.public_key_bytes = private_key.public_key.pubkey_bytes
async def generate_channel_private_key(self):
self.private_key = await asyncio.get_event_loop().run_in_executor(
None, ecdsa.SigningKey.generate, ecdsa.SECP256k1, None, hashlib.sha256
)
self.claim.channel.public_key_bytes = self.private_key.get_verifying_key().to_der()
self.script.generate()
return self.private_key

def is_channel_private_key(self, private_key: PrivateKey):
return self.claim.channel.public_key_bytes == private_key.public_key.pubkey_bytes
def is_channel_private_key(self, private_key):
return self.claim.channel.public_key_bytes == private_key.get_verifying_key().to_der()

@classmethod
def pay_claim_name_pubkey_hash(
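The updated signing path produces a deterministic (RFC 6979) r||s signature with `ecdsa`, and `is_signature_valid` verifies a DER-encoded signature against a DER public key using `cryptography` with a prehashed SHA-256 digest. A self-contained sketch of that sign/encode/verify round trip, outside the transaction classes:

```python
import hashlib

import ecdsa
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
from cryptography.hazmat.primitives.serialization import load_der_public_key

# Channel key and the digest being signed (stand-ins for real claim data).
signing_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
digest = hashlib.sha256(b"claim bytes to sign").digest()

# Deterministic (RFC 6979) signature as a raw 64-byte r||s blob.
raw_signature = signing_key.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)

# Re-encode r||s as DER, which is what cryptography's verify() expects.
r = int.from_bytes(raw_signature[:32], 'big')
s = int.from_bytes(raw_signature[32:], 'big')
der_signature = ecdsa.util.sigencode_der(r, s, ecdsa.SECP256k1.order)

# Verify against the DER-encoded public key using the prehashed digest.
public_key = load_der_public_key(signing_key.get_verifying_key().to_der())
public_key.verify(der_signature, digest, ec.ECDSA(Prehashed(hashes.SHA256())))  # raises if invalid
print("signature verified")
```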
@@ -203,12 +203,11 @@ class Wallet:
return True
return False

async def unlock(self, password):
def unlock(self, password):
for account in self.accounts:
if account.encrypted:
if not account.decrypt(password):
return False
await account.deterministic_channel_keys.ensure_cache_primed()
self.encryption_password = password
return True
@@ -18,7 +18,7 @@ ignore=words,server,rpc,schema,winpaths.py,migrator,undecorated.py
max-parents=10
max-args=10
max-line-length=120
good-names=T,t,n,i,j,k,x,y,s,f,d,h,c,e,op,db,tx,io,cachedproperty,log,id,r,iv,ts,l,pk
good-names=T,t,n,i,j,k,x,y,s,f,d,h,c,e,op,db,tx,io,cachedproperty,log,id,r,iv,ts,l
valid-metaclass-classmethod-first-arg=mcs
disable=
c-extension-no-member,
setup.py (2 changes)
@@ -63,7 +63,7 @@ setup(
'torrent': ['lbry-libtorrent'],
'lint': ['pylint==2.10.0'],
'test': ['coverage'],
'scribe': ['scribe @ git+https://github.com/lbryio/scribe.git'],
'scribe': ['scribe @ git+https://github.com/lbryio/hub.git@dfda41f85fb3e2ad909a1493d389cfdad5eaa5c1'],
},
classifiers=[
'Framework :: AsyncIO',
@@ -1,8 +1,5 @@
from binascii import unhexlify

from lbry.testcase import CommandTestCase
from lbry.wallet.dewies import dewies_to_lbc
from lbry.wallet.account import DeterministicChannelKeyManager


def extract(d, keys):

@@ -63,30 +60,15 @@ class AccountManagement(CommandTestCase):
self.assertEqual(accounts['items'][0]['name'], 'recreated account')

async def test_wallet_migration(self):
old_id, new_id, valid_key = (
'mi9E8KqFfW5ngktU22pN2jpgsdf81ZbsGY',
'mqs77XbdnuxWN4cXrjKbSoGLkvAHa4f4B8',
'-----BEGIN EC PRIVATE KEY-----\nMHQCAQEEIBZRTZ7tHnYCH3IE9mCo95'
'466L/ShYFhXGrjmSMFJw8eoAcGBSuBBAAK\noUQDQgAEmucoPz9nI+ChZrfhnh'
'0RZ/bcX0r2G0pYBmoNKovtKzXGa8y07D66MWsW\nqXptakqO/9KddIkBu5eJNS'
'UZzQCxPQ==\n-----END EC PRIVATE KEY-----\n'
)
# null certificates should get deleted
self.account.channel_keys = {
new_id: 'not valid key',
'foo': 'bar',
}
await self.channel_create('@foo1')
await self.channel_create('@foo2')
await self.channel_create('@foo3')
keys = list(self.account.channel_keys.keys())
self.account.channel_keys[keys[0]] = None
self.account.channel_keys[keys[1]] = "some invalid junk"
await self.account.maybe_migrate_certificates()
self.assertEqual(self.account.channel_keys, {})
self.account.channel_keys = {
new_id: 'not valid key',
'foo': 'bar',
'invalid address': valid_key,
}
await self.account.maybe_migrate_certificates()
self.assertEqual(self.account.channel_keys, {
new_id: valid_key
})
self.assertEqual(list(self.account.channel_keys.keys()), [keys[2]])

async def assertFindsClaims(self, claim_names, awaitable):
self.assertEqual(claim_names, [txo.claim_name for txo in (await awaitable)['items']])
@@ -192,100 +174,3 @@ class AccountManagement(CommandTestCase):
bad_address = address[0:20] + '9999999' + address[27:]
with self.assertRaisesRegex(Exception, f"'{bad_address}' is not a valid address"):
await self.daemon.jsonrpc_account_send('0.1', addresses=[bad_address])

async def test_hybrid_channel_keys(self):
# non-deterministic channel
self.account.channel_keys = {
'mqs77XbdnuxWN4cXrjKbSoGLkvAHa4f4B8':
'-----BEGIN EC PRIVATE KEY-----\nMHQCAQEEIBZRTZ7tHnYCH3IE9mCo95'
'466L/ShYFhXGrjmSMFJw8eoAcGBSuBBAAK\noUQDQgAEmucoPz9nI+ChZrfhnh'
'0RZ/bcX0r2G0pYBmoNKovtKzXGa8y07D66MWsW\nqXptakqO/9KddIkBu5eJNS'
'UZzQCxPQ==\n-----END EC PRIVATE KEY-----\n'
}
channel1 = await self.create_nondeterministic_channel('@foo1', '1.0', unhexlify(
'3056301006072a8648ce3d020106052b8104000a034200049ae7283f3f6723e0a1'
'66b7e19e1d1167f6dc5f4af61b4a58066a0d2a8bed2b35c66bccb4ec3eba316b16'
'a97a6d6a4a8effd29d748901bb9789352519cd00b13d'
))
await self.confirm_tx(channel1['txid'])

# deterministic channel
channel2 = await self.channel_create('@foo2')

await self.stream_create('stream-in-channel1', '0.01', channel_id=self.get_claim_id(channel1))
await self.stream_create('stream-in-channel2', '0.01', channel_id=self.get_claim_id(channel2))

resolved_stream1 = await self.resolve('@foo1/stream-in-channel1')
self.assertEqual('stream-in-channel1', resolved_stream1['name'])
self.assertTrue(resolved_stream1['is_channel_signature_valid'])

resolved_stream2 = await self.resolve('@foo2/stream-in-channel2')
self.assertEqual('stream-in-channel2', resolved_stream2['name'])
self.assertTrue(resolved_stream2['is_channel_signature_valid'])

async def test_deterministic_channel_keys(self):
seed = self.account.seed
keys = self.account.deterministic_channel_keys

# create two channels and make sure they have different keys
channel1a = await self.channel_create('@foo1')
channel2a = await self.channel_create('@foo2')
self.assertNotEqual(
channel1a['outputs'][0]['value']['public_key'],
channel2a['outputs'][0]['value']['public_key'],
)

# start another daemon from the same seed
self.daemon2 = await self.add_daemon(seed=seed)
channel2b, channel1b = (await self.daemon2.jsonrpc_channel_list())['items']

# both daemons end up with the same channel signing keys automagically
self.assertTrue(channel1b.has_private_key)
self.assertEqual(
channel1a['outputs'][0]['value']['public_key_id'],
channel1b.private_key.address
)
self.assertTrue(channel2b.has_private_key)
self.assertEqual(
channel2a['outputs'][0]['value']['public_key_id'],
channel2b.private_key.address
)

# repeatedly calling next channel key returns the same key when not used
current_known = keys.last_known
next_key = await keys.generate_next_key()
self.assertEqual(current_known, keys.last_known)
self.assertEqual(next_key.address, (await keys.generate_next_key()).address)
# again, should be idempotent
next_key = await keys.generate_next_key()
self.assertEqual(current_known, keys.last_known)
self.assertEqual(next_key.address, (await keys.generate_next_key()).address)

# create third channel while both daemons running, second daemon should pick it up
channel3a = await self.channel_create('@foo3')
self.assertEqual(current_known+1, keys.last_known)
self.assertNotEqual(next_key.address, (await keys.generate_next_key()).address)
channel3b, = (await self.daemon2.jsonrpc_channel_list(name='@foo3'))['items']
self.assertTrue(channel3b.has_private_key)
self.assertEqual(
channel3a['outputs'][0]['value']['public_key_id'],
channel3b.private_key.address
)

# channel key cache re-populated after simulated restart

# reset cache
self.account.deterministic_channel_keys = DeterministicChannelKeyManager(self.account)
channel3c, channel2c, channel1c = (await self.daemon.jsonrpc_channel_list())['items']
self.assertFalse(channel1c.has_private_key)
self.assertFalse(channel2c.has_private_key)
self.assertFalse(channel3c.has_private_key)

# repopulate cache
await self.account.deterministic_channel_keys.ensure_cache_primed()
self.assertEqual(self.account.deterministic_channel_keys.last_known, keys.last_known)
channel3c, channel2c, channel1c = (await self.daemon.jsonrpc_channel_list())['items']
self.assertTrue(channel1c.has_private_key)
self.assertTrue(channel2c.has_private_key)
self.assertTrue(channel3c.has_private_key)
@@ -3,9 +3,10 @@ import scribe

from unittest.mock import Mock

from scribe.blockchain.network import LBCRegTest
from scribe.hub import HUB_PROTOCOL_VERSION
from scribe.hub.udp import StatusServer
from scribe.hub.session import LBRYElectrumX
from scribe.blockchain.network import LBCRegTest

from lbry.wallet.network import Network
from lbry.wallet.orchstr8 import Conductor

@@ -35,7 +36,7 @@ class NetworkTests(IntegrationTestCase):
'payment_address': '',
'donation_address': '',
'daily_fee': '0',
'server_version': scribe.__version__,
'server_version': HUB_PROTOCOL_VERSION,
'trending_algorithm': 'fast_ar',
}, await self.ledger.network.get_server_features())
# await self.conductor.spv_node.stop()

@@ -65,7 +66,7 @@ class NetworkTests(IntegrationTestCase):
'payment_address': payment_address,
'donation_address': donation_address,
'daily_fee': '42',
'server_version': scribe.__version__,
'server_version': HUB_PROTOCOL_VERSION,
'trending_algorithm': 'fast_ar',
}, await self.ledger.network.get_server_features())
@@ -1,6 +1,5 @@
import asyncio
import json
from binascii import unhexlify

from lbry.wallet import ENCRYPT_ON_DISK
from lbry.error import InvalidPasswordError

@@ -282,19 +281,8 @@ class WalletEncryptionAndSynchronization(CommandTestCase):
)

# Channel Certificate
# non-deterministic channel
self.daemon2.wallet_manager.default_account.channel_keys['mqs77XbdnuxWN4cXrjKbSoGLkvAHa4f4B8'] = (
'-----BEGIN EC PRIVATE KEY-----\nMHQCAQEEIBZRTZ7tHnYCH3IE9mCo95'
'466L/ShYFhXGrjmSMFJw8eoAcGBSuBBAAK\noUQDQgAEmucoPz9nI+ChZrfhnh'
'0RZ/bcX0r2G0pYBmoNKovtKzXGa8y07D66MWsW\nqXptakqO/9KddIkBu5eJNS'
'UZzQCxPQ==\n-----END EC PRIVATE KEY-----\n'
)
channel = await self.create_nondeterministic_channel('@foo', '0.1', unhexlify(
'3056301006072a8648ce3d020106052b8104000a034200049ae7283f3f6723e0a1'
'66b7e19e1d1167f6dc5f4af61b4a58066a0d2a8bed2b35c66bccb4ec3eba316b16'
'a97a6d6a4a8effd29d748901bb9789352519cd00b13d'
), self.daemon2, blocking=True)
await self.confirm_tx(channel['txid'], self.daemon2.ledger)
channel = await daemon2.jsonrpc_channel_create('@foo', '0.1')
await self.confirm_tx(channel.id, self.daemon2.ledger)

# both daemons will have the channel but only one has the cert so far
self.assertItemCount(await daemon.jsonrpc_channel_list(), 1)
@@ -326,7 +314,7 @@ class WalletEncryptionAndSynchronization(CommandTestCase):
with self.assertRaisesRegex(AssertionError, "Cannot lock an unencrypted wallet, encrypt first."):
daemon.jsonrpc_wallet_lock()
# safe to call unlock and decrypt, they are no-ops at this point
await daemon.jsonrpc_wallet_unlock('password') # already unlocked
daemon.jsonrpc_wallet_unlock('password') # already unlocked
daemon.jsonrpc_wallet_decrypt() # already not encrypted

daemon.jsonrpc_wallet_encrypt('password')

@@ -342,7 +330,7 @@ class WalletEncryptionAndSynchronization(CommandTestCase):
# can't sign transactions with locked wallet
with self.assertRaises(AssertionError):
await daemon.jsonrpc_channel_create('@foo', '1.0')
await daemon.jsonrpc_wallet_unlock('password')
daemon.jsonrpc_wallet_unlock('password')
self.assertEqual(daemon.jsonrpc_wallet_status(), {'is_locked': False, 'is_encrypted': True,
'is_syncing': False})
await daemon.jsonrpc_channel_create('@foo', '1.0')

@@ -360,17 +348,10 @@ class WalletEncryptionAndSynchronization(CommandTestCase):
await daemon2.jsonrpc_channel_import(exported)
self.assertTrue(daemon2.jsonrpc_wallet_encrypt('password'))
self.assertTrue(daemon2.jsonrpc_wallet_lock())
self.assertTrue(await daemon2.jsonrpc_wallet_unlock("password"))
self.assertTrue(daemon2.jsonrpc_wallet_unlock("password"))
self.assertEqual(daemon2.jsonrpc_wallet_status(),
{'is_locked': False, 'is_encrypted': True, 'is_syncing': False})

async def test_locking_unlocking_does_not_break_deterministic_channels(self):
self.assertTrue(self.daemon.jsonrpc_wallet_encrypt("password"))
self.assertTrue(self.daemon.jsonrpc_wallet_lock())
self.account.deterministic_channel_keys._private_key = None
self.assertTrue(await self.daemon.jsonrpc_wallet_unlock("password"))
await self.channel_create()

async def test_sync_with_encryption_and_password_change(self):
daemon, daemon2 = self.daemon, self.daemon2
wallet, wallet2 = daemon.wallet_manager.default_wallet, daemon2.wallet_manager.default_wallet

@@ -397,8 +378,8 @@ class WalletEncryptionAndSynchronization(CommandTestCase):

# check new password is active
daemon.jsonrpc_wallet_lock()
self.assertFalse(await daemon.jsonrpc_wallet_unlock('password'))
self.assertTrue(await daemon.jsonrpc_wallet_unlock('password2'))
self.assertFalse(daemon.jsonrpc_wallet_unlock('password'))
self.assertTrue(daemon.jsonrpc_wallet_unlock('password2'))

# propagate disk encryption to daemon2
data = await daemon.jsonrpc_sync_apply('password3')

@@ -414,4 +395,4 @@ class WalletEncryptionAndSynchronization(CommandTestCase):
self.assertWalletEncrypted(wallet2.storage.path, True)

daemon2.jsonrpc_wallet_lock()
self.assertTrue(await daemon2.jsonrpc_wallet_unlock('password3'))
self.assertTrue(daemon2.jsonrpc_wallet_unlock('password3'))
@@ -1,6 +1,6 @@
import asyncio

import scribe
from scribe.hub import HUB_PROTOCOL_VERSION
from scribe.hub.session import LBRYElectrumX

from lbry.error import ServerPaymentFeeAboveMaxAllowedError

@@ -34,7 +34,7 @@ class TestSessions(IntegrationTestCase):

async def test_proper_version(self):
info = await self.ledger.network.get_server_features()
self.assertEqual(scribe.__version__, info['server_version'])
self.assertEqual(HUB_PROTOCOL_VERSION, info['server_version'])

async def test_client_errors(self):
# Goal is ensuring thsoe are raised and not trapped accidentally
@@ -18,6 +18,12 @@ from lbry.crypto.hash import sha256

log = logging.getLogger(__name__)

def get_encoded_signature(signature):
signature = signature.encode() if isinstance(signature, str) else signature
r = int(signature[:int(len(signature) / 2)], 16)
s = int(signature[int(len(signature) / 2):], 16)
return ecdsa.util.sigencode_der(r, s, len(signature) * 4)


STREAM_TYPES = {
'video': 1,

@@ -36,7 +42,7 @@ def verify(channel, data, signature, channel_hash=None):
data
]
return Output.is_signature_valid(
unhexlify(signature['signature']),
get_encoded_signature(signature['signature']),
sha256(b''.join(pieces)),
channel.claim.channel.public_key_bytes
)
@ -148,10 +154,10 @@ class ClaimSearchCommand(ClaimTestCase):
await self.assertFindsClaim(self.channel, txid=self.channel['txid'], nout=0)
await self.assertFindsClaim(channel2, claim_id=channel_id2)
await self.assertFindsClaim(channel2, txid=channel2['txid'], nout=0)
await self.assertFindsClaim(
channel2, public_key_id=channel_txo2['value']['public_key_id'])
await self.assertFindsClaim(
self.channel, public_key_id=channel_txo['value']['public_key_id'])
#await self.assertFindsClaim(
# channel2, public_key_id=self.ledger.public_key_to_address(channel_txo2['value']['public_key_id']))
#await self.assertFindsClaim(
# self.channel, public_key_id=channel_txo['value']['public_key_id'])

signed = await self.stream_create('on-channel-claim', '0.001', channel_id=self.channel_id)
signed2 = await self.stream_create('on-channel-claim', '0.0001', channel_id=channel_id2,

@ -1129,17 +1135,17 @@ class ChannelCommands(CommandTestCase):
tx = await self.channel_update(claim_id, bid='4.0')
self.assertEqual(tx['outputs'][0]['amount'], '4.0')

await self.assertBalance(self.account, '5.991503')
await self.assertBalance(self.account, '5.991447')

# not enough funds
with self.assertRaisesRegex(
InsufficientFundsError, "Not enough funds to cover this transaction."):
await self.channel_create('@foo2', '9.0')
self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 1)
await self.assertBalance(self.account, '5.991503')
await self.assertBalance(self.account, '5.991447')

# spend exactly amount available, no change
tx = await self.channel_create('@foo3', '5.981322')
tx = await self.channel_create('@foo3', '5.981266')
await self.assertBalance(self.account, '0.0')
self.assertEqual(len(tx['outputs']), 1) # no change
self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 2)

@ -1257,7 +1263,7 @@ class ChannelCommands(CommandTestCase):
await daemon2.jsonrpc_channel_import(exported_data)
channels = (await daemon2.jsonrpc_channel_list())['items']
self.assertEqual(1, len(channels))
self.assertEqual(channel_private_key.private_key_bytes, channels[0].private_key.private_key_bytes)
self.assertEqual(channel_private_key.to_string(), channels[0].private_key.to_string())

# second wallet can't update until channel is sent to it
with self.assertRaisesRegex(AssertionError, 'Cannot find private key for signing output.'):

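The import assertion above now compares channel keys via to_string(), i.e. the raw key bytes of an ecdsa signing key. A minimal sketch of the PEM round trip that comparison relies on, using plain python-ecdsa keys; the daemon's own export/import plumbing is not reproduced here:

import ecdsa

original = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)

# serialize the signing key to PEM ...
pem = original.to_pem()

# ... and load it back on the receiving side
restored = ecdsa.SigningKey.from_pem(pem)

# to_string() returns the raw private scalar, so a faithful round trip compares equal
assert original.to_string() == restored.to_string()
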
@ -572,13 +572,18 @@ class DiskSpaceManagement(CommandTestCase):
self.assertTrue(blobs2.issubset(blobs))
self.assertFalse(blobs3.issubset(blobs))
self.assertTrue(blobs4.issubset(blobs))
# check that pending blobs are not accounted (#3617)
await self.daemon.storage.db.execute_fetchall("update blob set status='pending'")
await self.blob_clean() # just to refresh caches, has no effect
self.assertEqual(0, (await self.status())['disk_space']['total_used_mb'])
self.assertEqual(0, (await self.status())['disk_space']['content_blobs_storage_used_mb'])
self.assertEqual(0, (await self.status())['disk_space']['published_blobs_storage_used_mb'])
# check that added_on gets set on downloads (was a bug)
self.assertLess(0, await self.daemon.storage.run_and_return_one_or_none("select min(added_on) from blob"))
await self.daemon.jsonrpc_file_delete(delete_all=True)
await self.daemon.jsonrpc_get("foo4", save_file=False)
self.assertLess(0, await self.daemon.storage.run_and_return_one_or_none("select min(added_on) from blob"))


class TestBackgroundDownloaderComponent(CommandTestCase):
async def get_blobs_from_sd_blob(self, sd_blob):
descriptor = await StreamDescriptor.from_stream_descriptor_blob(

@ -24,10 +24,10 @@ class TestExchangeRateManager(AsyncioTestCase):
self.assertLessEqual(len(failures), 1, f"feed failures: {failures}. Please check exchange rate feeds!")
lbc = manager.convert_currency('USD', 'LBC', Decimal('1.0'))
self.assertGreaterEqual(lbc, 2.0)
self.assertLessEqual(lbc, 60.0)
self.assertLessEqual(lbc, 80.0)
lbc = manager.convert_currency('BTC', 'LBC', Decimal('0.01'))
self.assertGreaterEqual(lbc, 1_000)
self.assertLessEqual(lbc, 20_000)
self.assertLessEqual(lbc, 30_000)

async def test_it_handles_feed_being_offline(self):
class FakeFeed(MarketFeed):

@ -1796,7 +1796,7 @@ def generate_signed_legacy(address: bytes, output: Output):
claim.SerializeToString(),
output.claim_hash[::-1]
]))
signature = output.private_key.sign_compact(digest)
signature = output.private_key.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)
claim.publisherSignature.version = 1
claim.publisherSignature.signatureType = 1
claim.publisherSignature.signature = signature

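The legacy signer above now calls sign_digest_deterministic(), the python-ecdsa API for RFC 6979 deterministic signatures. A minimal sketch of the property that matters here, with a key generated only for illustration: the same key and digest always produce the same signature bytes.

import hashlib

import ecdsa

key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
digest = hashlib.sha256(b"serialized claim bytes").digest()

sig1 = key.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)
sig2 = key.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)

# no per-signature randomness, so repeated signing is reproducible
assert sig1 == sig2

# the default encoding is the raw 64-byte r||s form on SECP256k1
assert len(sig1) == 64
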
@ -31,9 +31,7 @@ class BasicTransactionTest(IntegrationTestCase):
channel_txo = Output.pay_claim_name_pubkey_hash(
l2d('1.0'), '@bar', channel, self.account.ledger.address_to_hash160(address1)
)
channel_txo.set_channel_private_key(
await self.account.generate_channel_private_key()
)
await channel_txo.generate_channel_private_key()
channel_txo.script.generate()
channel_tx = await Transaction.create([], [channel_txo], [self.account], self.account)

@ -138,7 +138,9 @@ class TestTypesV1Compatibility(TestCase):
channel = cert.channel
self.assertEqual(
channel.public_key,
'033878b1edd4a1373149909ef03f4339f6da9c2bd2214c040fd2e530463ffe6609'
'3056301006072a8648ce3d020106052b8104000a034200043878b1edd4a1373149909ef03f4339f6da9c2b'
'd2214c040fd2e530463ffe66098eca14fc70b50ff3aefd106049a815f595ed5a13eda7419ad78d9ed7ae47'
'3f17'
)

def test_unsigned_with_fee(self):

@ -1,11 +1,7 @@
import asyncio
from binascii import hexlify
from lbry.testcase import AsyncioTestCase
from lbry.wallet import (
Wallet, Ledger, Database, Headers,
Account, SingleKey, HierarchicalDeterministic,
DeterministicChannelKeyManager
)
from lbry.wallet import Wallet, Ledger, Database, Headers, Account, SingleKey, HierarchicalDeterministic


class TestAccount(AsyncioTestCase):

@ -1,7 +1,7 @@
from binascii import unhexlify, hexlify

from lbry.testcase import AsyncioTestCase
from lbry.wallet.bip32 import PublicKey, PrivateKey, from_extended_key_string
from lbry.wallet.bip32 import PubKey, PrivateKey, from_extended_key_string
from lbry.wallet import Ledger, Database, Headers

from tests.unit.wallet.key_fixtures import expected_ids, expected_privkeys, expected_hardened_privkeys

@ -11,24 +11,24 @@ class BIP32Tests(AsyncioTestCase):

def test_pubkey_validation(self):
with self.assertRaisesRegex(TypeError, 'chain code must be raw bytes'):
PublicKey(None, None, 1, None, None, None)
PubKey(None, None, 1, None, None, None)
with self.assertRaisesRegex(ValueError, 'invalid chain code'):
PublicKey(None, None, b'abcd', None, None, None)
PubKey(None, None, b'abcd', None, None, None)
with self.assertRaisesRegex(ValueError, 'invalid child number'):
PublicKey(None, None, b'abcd'*8, -1, None, None)
PubKey(None, None, b'abcd'*8, -1, None, None)
with self.assertRaisesRegex(ValueError, 'invalid depth'):
PublicKey(None, None, b'abcd'*8, 0, 256, None)
PubKey(None, None, b'abcd'*8, 0, 256, None)
with self.assertRaisesRegex(TypeError, 'pubkey must be raw bytes'):
PublicKey(None, None, b'abcd'*8, 0, 255, None)
PubKey(None, None, b'abcd'*8, 0, 255, None)
with self.assertRaisesRegex(ValueError, 'pubkey must be 33 bytes'):
PublicKey(None, b'abcd', b'abcd'*8, 0, 255, None)
PubKey(None, b'abcd', b'abcd'*8, 0, 255, None)
with self.assertRaisesRegex(ValueError, 'invalid pubkey prefix byte'):
PublicKey(
PubKey(
None,
unhexlify('33d1a3dc8155673bc1e2214fa493ccc82d57961b66054af9b6b653ac28eeef3ffe'),
b'abcd'*8, 0, 255, None
)
pubkey = PublicKey( # success
pubkey = PubKey( # success
None,
unhexlify('03d1a3dc8155673bc1e2214fa493ccc82d57961b66054af9b6b653ac28eeef3ffe'),
b'abcd'*8, 0, 1, None

@ -37,7 +37,7 @@ class BIP32Tests(AsyncioTestCase):
pubkey.child(-1)
for i in range(20):
new_key = pubkey.child(i)
self.assertIsInstance(new_key, PublicKey)
self.assertIsInstance(new_key, PubKey)
self.assertEqual(hexlify(new_key.identifier()), expected_ids[i])

async def test_private_key_validation(self):

@ -60,7 +60,7 @@ class BIP32Tests(AsyncioTestCase):
self.assertEqual(
ec_point[1], 86198965946979720220333266272536217633917099472454294641561154971209433250106
)
self.assertEqual('bUDcmraBp2zCV3QWmVVeQaEgepbs1b2gC9', private_key.address)
self.assertEqual('bUDcmraBp2zCV3QWmVVeQaEgepbs1b2gC9', private_key.address())
with self.assertRaisesRegex(ValueError, 'invalid BIP32 private key child number'):
private_key.child(-1)
self.assertIsInstance(private_key.child(PrivateKey.HARDENED), PrivateKey)

@ -100,5 +100,5 @@ class BIP32Tests(AsyncioTestCase):
ledger,
'xpub661MyMwAqRbcF84AR8yfHoMzf4S2ct6mPJtvBtvNeyN9hBHuZ6uGJszkTSn5fQUCdz3XU17eBzFeAUwV6f'
'iW44g14WF52fYC5J483wqQ5ZP',
), PublicKey
), PubKey
)

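These renames leave the surface exercised by the tests intact: from_extended_key_string() still parses an xpub, returns a PubKey, and supports non-hardened child derivation. A minimal sketch built only from calls that appear in these tests; the xpub is the fixture from the test file:

from binascii import hexlify

from lbry.wallet import Ledger, Database, Headers
from lbry.wallet.bip32 import PubKey, from_extended_key_string

ledger = Ledger({'db': Database(':memory:'), 'headers': Headers(':memory:')})

pubkey = from_extended_key_string(
    ledger,
    'xpub661MyMwAqRbcF84AR8yfHoMzf4S2ct6mPJtvBtvNeyN9hBHuZ6uGJszkTSn5fQUCdz3XU17eBzFeAUwV6f'
    'iW44g14WF52fYC5J483wqQ5ZP',
)
assert isinstance(pubkey, PubKey)

# public keys alone are enough for non-hardened child derivation
child = pubkey.child(0)
print(hexlify(child.identifier()))
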
@ -2,13 +2,10 @@ from binascii import unhexlify

from lbry.testcase import AsyncioTestCase
from lbry.wallet.constants import CENT, NULL_HASH32
from lbry.wallet.bip32 import PrivateKey, KeyPath
from lbry.wallet.mnemonic import Mnemonic
from lbry.wallet import Ledger, Database, Headers, Transaction, Input, Output
from lbry.schema.claim import Claim
from lbry.crypto.hash import sha256


def get_output(amount=CENT, pubkey_hash=NULL_HASH32):
return Transaction() \
.add_outputs([Output.pay_pubkey_hash(amount, pubkey_hash)]) \

@ -24,11 +21,8 @@ def get_tx():


async def get_channel(claim_name='@foo'):
seed = Mnemonic.mnemonic_to_seed(Mnemonic().make_seed(), '')
key = PrivateKey.from_seed(Ledger, seed)
channel_key = key.child(KeyPath.CHANNEL).child(0)
channel_txo = Output.pay_claim_name_pubkey_hash(CENT, claim_name, Claim(), b'abc')
channel_txo.set_channel_private_key(channel_key)
await channel_txo.generate_channel_private_key()
get_tx().add_outputs([channel_txo])
return channel_txo

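Both this helper and the integration test earlier drop the wallet-derived channel key and let the output generate its own key, which this changeset handles as an ecdsa signing key. A minimal sketch of the underlying primitive, assuming a fresh SECP256k1 key; the actual Output.generate_channel_private_key() implementation is not shown in this diff:

import ecdsa

# a throwaway channel key, for illustration only
channel_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)

# the public half is what ends up embedded in the channel claim,
# DER-encoded like the '3056...' public_key hex in the compatibility test above
public_key_der = channel_key.get_verifying_key().to_der()
print(len(public_key_der))
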
@ -120,109 +114,6 @@ class TestValidatingOldSignatures(AsyncioTestCase):

self.assertTrue(stream.is_signed_by(channel, ledger))

def test_another_signed_claim_made_by_ytsync(self):
stream_tx = Transaction(unhexlify(
b'010000000185870fabdd6bd2d57749afebc0b239e8d0ebeb6f3647d6cfcabd5ea2200ac632010000006b4'
b'83045022100877c86de154e39f21959bc2157865071924adb7930a7a8910714f27398cd2689022074270f'
b'074ae260fff319d5e0c030691821bc75b82ff0179898ac3eaeda4123eb01210200328f7f001f22ea25d72'
b'ba37379e3065020c4d8371d9199dc4e3770084e26b9ffffffff0240420f0000000000fdcc05b527746865'
b'2d637269746963616c2d6e6565642d666f722d696e646570656e64656e742d6d656469614d85050191bba'
b'd064bdc455b9ebddeeb559686b13f027615384ec7c9d981c3c21a6e3d723a654e86bd707d21174c4f697f'
b'5080cf367a3b2dfc059e6cc14a962631df69b9886f4d8b97cb339b14633966fd5ac7d75edacdf30ac5010'
b'a90010a304af34d1c1467ebfc8785e2a49c7d5bec3cc6db94db858f1dcf95e4256564fba586d6e01f496d'
b'f2a34344e021d2725ffd12197468652d637269746963616c2d6e6565642d666f722e6d703418ee97eac10'
b'22209766964656f2f6d70343230ba13e6b667a9acef7e1b1caa88b9eb1d4680dea84b1d3e838266595805'
b'ab3343855c20af35012f942ce0d5111ce080331a1f436f7079726967687465642028636f6e74616374207'
b'075626c69736865722928e2e3c98d065a0908800f10b80818f314423954686520437269746963616c204e'
b'65656420666f7220496e646570656e64656e74204d65646961207c20476c656e6e20477265656e77616c6'
b'44af006496e636c7564657320616e20696e74726f64756374696f6e20627920546f6d20576f6f64732e20'
b'5265636f7264656420696e204c616b65204a61636b736f6e2c2054657861732c206f6e20446563656d626'
b'57220342c20323032312e0a0a526f6e205061756c27732074776f2063616d706169676e7320666f722070'
b'7265736964656e7420283230303820616e64203230313229207765726520776174657273686564206d6f6'
b'd656e747320666f72206c6962657274792d6d696e6465642070656f706c652061726f756e642074686520'
b'776f726c642e205468652022526f6e205061756c205265766f6c7574696f6e22e2809463656e746572656'
b'42061726f756e642068697320756e64696c75746564206d657373616765206f662070656163652c207072'
b'6f70657274792c20616e64206d61726b657473e280946368616e6765642074686520776179206d696c6c6'
b'96f6e732074686f756768742061626f75742074686520416d65726963616e20656d7069726520616e6420'
b'74686520416d65726963616e2066696e616e6369616c2073797374656d2e2044722e205061756c2773206'
b'66f637573206f6e2063656e7472616c2062616e6b696e6720616e6420666f726569676e20706f6c696379'
b'2063617567687420706f6c6974696369616e7320616e642070756e64697473206f66662067756172642c2'
b'0666f7263696e67207468656d20746f20736372616d626c6520666f72206578706c616e6174696f6e7320'
b'6f66206f7572204d6964646c65204561737420706f6c69637920616e6420536f766965742d7374796c652'
b'063656e7472616c20706c616e6e696e6720617420746865204665642e20506f6c697469637320696e2041'
b'6d657269636120686173206e6f74206265656e207468652073616d652073696e636520746865202247697'
b'56c69616e69206d6f6d656e742220616e642022456e6420746865204665642e222054686520526f6e2050'
b'61756c205265766f6c7574696f6e2077617320626f7468206120706f6c69746963616c20616e642063756'
b'c747572616c207068656e6f6d656e6f6e2e0a0a303a303020496e74726f64756374696f6e20627920546f'
b'6d20576f6f64730a343a323720476c656e6e20477265656e77616c640a2e2e2e0a68747470733a2f2f777'
b'7772e796f75747562652e636f6d2f77617463683f763d4e4b70706d52467673453052292a276874747073'
b'3a2f2f7468756d626e61696c732e6c6272792e636f6d2f4e4b70706d5246767345305a046e6577735a096'
b'3617468656472616c5a0f636f72706f72617465206d656469615a08637269746963616c5a0f676c656e6e'
b'20677265656e77616c645a0b696e646570656e64656e745a0a6a6f75726e616c69736d5a056d656469615'
b'a056d697365735a08706f6c69746963735a0a70726f706167616e64615a08726f6e207061756c5a057472'
b'757468620208016d7576a9140969964db5b5744e2d2d0de797f5904efc80d02188acc8814200000000001'
b'976a91439086597f9cfc066f4749b8bb245bf561714fda888ac00000000'
))
stream = stream_tx.outputs[0]

channel_tx = Transaction(unhexlify(
b'01000000011d47b91b409b317e427adb87ec4b0bfc9fad2abf6ec3296f41918e4b3cb9d4e7010000006a4'
b'7304402205e53ef7fc643ed00f0240dd1c3302b82141f481ed071cbcdd6b6ec6166ffd4e002203eb28ce6'
b'39f80253f66ff3bf45288a60133d7f5625217d1ecf3b57da440b559f012103b852d61074eb995b702a800'
b'f284e937ece4fea7f023beb70e6b0d1bff36d64b9ffffffff0240420f0000000000fdde01b506406d6973'
b'65734db801001299010a583056301006072a8648ce3d020106052b8104000a034200047ddb1d639d7bdd0'
b'953d9ab0bf9e971a632f85f9823c1d85780aa3e0a702b503c2962d00f67360e803514bf5864710925aacb'
b'effd9597532c7e60eb21b4e3fd03223d2a3b68747470733a2f2f7468756d626e61696c732e6c6272792e6'
b'36f6d2f62616e6e65722d55436d54362d43684b7061694956753266684549734e7451420a6d697365736d'
b'656469614ad401466561747572656420766964656f732066726f6d20746865204d6973657320496e73746'
b'9747574652e20546865204d6973657320496e737469747574652070726f6d6f7465732041757374726961'
b'6e2065636f6e6f6d6963732c2066726565646f6d2c20616e6420706561636520696e20746865206c69626'
b'572616c20696e74656c6c65637475616c20747261646974696f6e206f66204c756477696720766f6e204d'
b'69736573207468726f7567682072657365617263682c207075626c697368696e672c20616e64206564756'
b'36174696f6e2e52362a3468747470733a2f2f7468756d626e61696c732e6c6272792e636f6d2f55436d54'
b'362d43684b7061694956753266684549734e74516d7576a914cd77ded2400e6569f03a2580244bb395f95'
b'f91fc88ac344ab701000000001976a914cabdbfce726d2fda92ffe0041a4303f6c6c34cda88ac00000000'
))
channel = channel_tx.outputs[0]

ledger = Ledger({
'db': Database(':memory:'),
'headers': Headers(':memory:')
})

self.assertTrue(stream.is_signed_by(channel, ledger))

def test_claim_signed_using_ecdsa_validates_with_coincurve(self):
channel_tx = Transaction(unhexlify(
"0100000001b91d829283c0d80cb8113d5f36b6da3dfe9df3e783f158bfb3fd1b2b178d7fc9010000006b48"
"3045022100f4e2b4ee38388c3d3a62f4b12fdd413f6f140168e85884bbeb33a3f2d3159ef502201721200f"
"4a4f3b87484d4f47c9054e31cd3ba451dd3886a7f9f854893e7c8cf90121023f9e906e0c120f3bf74feb40"
"f01ddeafbeb1856d91938c3bef25bed06767247cffffffff0200e1f5050000000081b505406368616e4c5d"
"00125a0a583056301006072a8648ce3d020106052b8104000a03420004d7fa13fd8e57f3a0b878eaaf3d17"
"9144d25ddbe4a3e4440a661f51b4134c6a13c9c98678ff8411932e60fd97d7baf03ea67ebcc21097230cfb"
"2241348aadb55e6d7576a9149c6d700f89c77f0e8c650ba05656f8f2392782d388acf47c95350000000019"
"76a914d9502233e0e1fc76e13e36c546f704c3124d5eaa88ac00000000"
))
channel = channel_tx.outputs[0]

stream_tx = Transaction(unhexlify(
"010000000116a1d90763f2e3a2348c7fb438a23f232b15e3ffe3f058c3b2ab52c8bed8dcb5010000006b48"
"30450221008f38561b3a16944c63b4f4f1562f1efe1b2060f31d249e234003ee5e3461756f02205773c99e"
"83c968728e4f2433a13871c6ad23f6c10368ac52fa62a09f3f7ef5fd012102597f39845b98e2415b777aa0"
"3849d346d287af7970deb05f11214b3418ae9d82ffffffff0200e1f50500000000fd0c01b505636c61696d"
"4ce8012e6e40fa5fee1b915af3b55131dcbcebee34ab9148292b084ce3741f2e0db49783f3d854ac885f2b"
"6304a76ef7048046e338dd414ba4c64e8468651768ffaaf550c8560637ac8c477ea481ac2a9264097240f4"
"ab0a90010a8d010a3056bf5dbae43f77a63d075b0f2ae9c7c3e3098db93779c7f9840da0f4db9c2f8c8454"
"f4edd1373e2b64ee2e68350d916e120b746d706c69647879363171180322186170706c69636174696f6e2f"
"6f637465742d73747265616d3230f293f5acf4310562d4a41f6620167fe6d83761a98d36738908ce5c8776"
"1642710e55352a396276a42eda92ff5856f46f6d7576a91434bd3dc4c45cc0635eb2ad5da658727e5442ca"
"0f88ace82f902f000000001976a91427b27c89eaebf68d063c107241584c07e5a6ccc688ac00000000"
))
stream = stream_tx.outputs[0]

ledger = Ledger({'db': Database(':memory:'), 'headers': Headers(':memory:')})
self.assertTrue(stream.is_signed_by(channel, ledger))


class TestValidateSignContent(AsyncioTestCase):

@ -230,10 +121,13 @@ class TestValidateSignContent(AsyncioTestCase):
some_content = "MEANINGLESS CONTENT AEE3353320".encode()
timestamp_str = "1630564175"
channel = await get_channel()
stream = get_stream()
signature = channel.sign_data(some_content, timestamp_str)
stream.signable.signature = unhexlify(signature.encode())
encoded_signature = stream.get_encoded_signature()
pieces = [timestamp_str.encode(), channel.claim_hash, some_content]
self.assertTrue(Output.is_signature_valid(
unhexlify(signature.encode()),
encoded_signature,
sha256(b''.join(pieces)),
channel.claim.channel.public_key_bytes
))