pylint passes for lbry/wallet

Lex Berezhny 2020-01-02 22:50:27 -05:00
parent fb1af9e3d2
commit cbc6d6a572
9 changed files with 88 additions and 118 deletions

View file

@ -1,17 +1,16 @@
import os
import time
import json
import ecdsa
import logging
import typing
import asyncio
import random
from functools import partial
from hashlib import sha256
from string import hexdigits
from typing import Type, Dict, Tuple, Optional, Any, List
import ecdsa
from lbry.error import InvalidPasswordError
from lbry.crypto.crypt import aes_encrypt, aes_decrypt
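
Note: the hunk above only reorders imports so that the third-party ecdsa import follows the standard-library block, which is what pylint's wrong-import-order (C0411) check expects. A minimal sketch of that grouping, assuming the ecdsa and lbry packages are installed (module names mirror the diff):

# Standard library first, then third-party, then first-party/local imports.
import os        # standard library
import json      # standard library

import ecdsa     # third-party

from lbry.error import InvalidPasswordError  # first-party / local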

View file

@ -360,12 +360,12 @@ class Database(SQLiteMixin):
"""
CREATE_TABLES_QUERY = (
PRAGMAS +
CREATE_ACCOUNT_TABLE +
CREATE_PUBKEY_ADDRESS_TABLE +
CREATE_TX_TABLE +
CREATE_TXO_TABLE +
CREATE_TXI_TABLE
PRAGMAS +
CREATE_ACCOUNT_TABLE +
CREATE_PUBKEY_ADDRESS_TABLE +
CREATE_TX_TABLE +
CREATE_TXO_TABLE +
CREATE_TXI_TABLE
)
@staticmethod
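
Note: the database.py change is indentation only; the concatenated SQL fragments are re-indented one level past the opening parenthesis, presumably to satisfy pylint's continuation-indent check (bad-continuation at the time). A minimal sketch with hypothetical fragments:

# Hypothetical SQL fragments; only the continuation indentation is the point.
PRAGMAS = "PRAGMA journal_mode=WAL;\n"
CREATE_ACCOUNT_TABLE = "CREATE TABLE IF NOT EXISTS account_address (address TEXT);\n"

CREATE_TABLES_QUERY = (
    PRAGMAS +
    CREATE_ACCOUNT_TABLE
)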

View file

@ -24,17 +24,17 @@ class InvalidHeader(Exception):
self.height = height
class BaseHeaders:
class Headers:
header_size: int
chunk_size: int
header_size = 112
chunk_size = 10**16
max_target: int
genesis_hash: Optional[bytes]
target_timespan: int
max_target = 0x0000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
genesis_hash = b'9c89283ba0f3227f6c03b70216b9f665f0118d5e0fa729cedf4fb34d6a34f463'
target_timespan = 150
checkpoint = (600_000, b'100b33ca3d0b86a48f0d6d6f30458a130ecb89d5affefe4afccb134d5a40f4c2')
validate_difficulty: bool = True
checkpoint = None
def __init__(self, path) -> None:
if path == ':memory:':
@ -53,20 +53,48 @@ class BaseHeaders:
self.io.close()
@staticmethod
def serialize(header: dict) -> bytes:
raise NotImplementedError
def serialize(header):
return b''.join([
struct.pack('<I', header['version']),
unhexlify(header['prev_block_hash'])[::-1],
unhexlify(header['merkle_root'])[::-1],
unhexlify(header['claim_trie_root'])[::-1],
struct.pack('<III', header['timestamp'], header['bits'], header['nonce'])
])
@staticmethod
def deserialize(height, header):
raise NotImplementedError
version, = struct.unpack('<I', header[:4])
timestamp, bits, nonce = struct.unpack('<III', header[100:112])
return {
'version': version,
'prev_block_hash': hexlify(header[4:36][::-1]),
'merkle_root': hexlify(header[36:68][::-1]),
'claim_trie_root': hexlify(header[68:100][::-1]),
'timestamp': timestamp,
'bits': bits,
'nonce': nonce,
'block_height': height,
}
def get_next_chunk_target(self, chunk: int) -> ArithUint256:
return ArithUint256(self.max_target)
@staticmethod
def get_next_block_target(chunk_target: ArithUint256, previous: Optional[dict],
def get_next_block_target(self, max_target: ArithUint256, previous: Optional[dict],
current: Optional[dict]) -> ArithUint256:
return chunk_target
# https://github.com/lbryio/lbrycrd/blob/master/src/lbry.cpp
if previous is None and current is None:
return max_target
if previous is None:
previous = current
actual_timespan = current['timestamp'] - previous['timestamp']
modulated_timespan = self.target_timespan + int((actual_timespan - self.target_timespan) / 8)
minimum_timespan = self.target_timespan - int(self.target_timespan / 8) # 150 - 18 = 132
maximum_timespan = self.target_timespan + int(self.target_timespan / 2) # 150 + 75 = 225
clamped_timespan = max(minimum_timespan, min(modulated_timespan, maximum_timespan))
target = ArithUint256.from_compact(current['bits'])
new_target = min(max_target, (target * clamped_timespan) / self.target_timespan)
return new_target
def __len__(self) -> int:
if self._size is None:
@ -228,9 +256,9 @@ class BaseHeaders:
return
previous_header_hash = header_hash
@staticmethod
def get_proof_of_work(header_hash: bytes) -> ArithUint256:
return ArithUint256(int(b'0x' + header_hash, 16))
@classmethod
def get_proof_of_work(cls, header_hash: bytes):
return ArithUint256(int(b'0x' + cls.header_hash_to_pow_hash(header_hash), 16))
def _iterate_chunks(self, height: int, headers: bytes) -> Iterator[Tuple[int, bytes]]:
assert len(headers) % self.header_size == 0, f"{len(headers)} {len(headers)%self.header_size}"
@ -248,68 +276,10 @@ class BaseHeaders:
header = headers[start:end]
yield self.hash_header(header), self.deserialize(height+idx, header)
class Headers(BaseHeaders):
header_size = 112
chunk_size = 10**16
max_target = 0x0000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
genesis_hash = b'9c89283ba0f3227f6c03b70216b9f665f0118d5e0fa729cedf4fb34d6a34f463'
target_timespan = 150
checkpoint = (600_000, b'100b33ca3d0b86a48f0d6d6f30458a130ecb89d5affefe4afccb134d5a40f4c2')
@property
def claim_trie_root(self):
return self[self.height]['claim_trie_root']
@staticmethod
def serialize(header):
return b''.join([
struct.pack('<I', header['version']),
unhexlify(header['prev_block_hash'])[::-1],
unhexlify(header['merkle_root'])[::-1],
unhexlify(header['claim_trie_root'])[::-1],
struct.pack('<III', header['timestamp'], header['bits'], header['nonce'])
])
@staticmethod
def deserialize(height, header):
version, = struct.unpack('<I', header[:4])
timestamp, bits, nonce = struct.unpack('<III', header[100:112])
return {
'version': version,
'prev_block_hash': hexlify(header[4:36][::-1]),
'merkle_root': hexlify(header[36:68][::-1]),
'claim_trie_root': hexlify(header[68:100][::-1]),
'timestamp': timestamp,
'bits': bits,
'nonce': nonce,
'block_height': height,
}
def get_next_block_target(self, max_target: ArithUint256, previous: Optional[dict],
current: Optional[dict]) -> ArithUint256:
# https://github.com/lbryio/lbrycrd/blob/master/src/lbry.cpp
if previous is None and current is None:
return max_target
if previous is None:
previous = current
actual_timespan = current['timestamp'] - previous['timestamp']
modulated_timespan = self.target_timespan + int((actual_timespan - self.target_timespan) / 8)
minimum_timespan = self.target_timespan - int(self.target_timespan / 8) # 150 - 18 = 132
maximum_timespan = self.target_timespan + int(self.target_timespan / 2) # 150 + 75 = 225
clamped_timespan = max(minimum_timespan, min(modulated_timespan, maximum_timespan))
target = ArithUint256.from_compact(current['bits'])
new_target = min(max_target, (target * clamped_timespan) / self.target_timespan)
return new_target
@classmethod
def get_proof_of_work(cls, header_hash: bytes):
return super().get_proof_of_work(
cls.header_hash_to_pow_hash(header_hash)
)
@staticmethod
def header_hash_to_pow_hash(header_hash: bytes):
header_hash_bytes = unhexlify(header_hash)[::-1]
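
Note: this file folds BaseHeaders into a single Headers class with a concrete 112-byte serialize()/deserialize() pair and the lbrycrd retargeting rule, which rescales the difficulty target by a timespan clamped to [132, 225] seconds around the 150-second goal. A hedged, self-contained sketch of both, using a synthetic header rather than real chain data:

# Round-trip of the 112-byte header layout (4 version + 3*32 hashes + 3*4 ints),
# mirroring the serialize()/deserialize() pair in the diff above.
import struct
from binascii import hexlify, unhexlify

def deserialize(height, header):
    version, = struct.unpack('<I', header[:4])
    timestamp, bits, nonce = struct.unpack('<III', header[100:112])
    return {
        'version': version,
        'prev_block_hash': hexlify(header[4:36][::-1]),
        'merkle_root': hexlify(header[36:68][::-1]),
        'claim_trie_root': hexlify(header[68:100][::-1]),
        'timestamp': timestamp, 'bits': bits, 'nonce': nonce,
        'block_height': height,
    }

def serialize(header):
    return b''.join([
        struct.pack('<I', header['version']),
        unhexlify(header['prev_block_hash'])[::-1],
        unhexlify(header['merkle_root'])[::-1],
        unhexlify(header['claim_trie_root'])[::-1],
        struct.pack('<III', header['timestamp'], header['bits'], header['nonce'])
    ])

# Synthetic 112-byte header: version 1, zeroed hashes, arbitrary tail fields.
raw = struct.pack('<I', 1) + bytes(96) + struct.pack('<III', 1_580_000_000, 0x1f00ffff, 42)
assert serialize(deserialize(0, raw)) == raw  # 4 + 3*32 + 3*4 = 112 bytes round-trips

# Retarget clamping from get_next_block_target(): with a 150s target and a 400s
# gap between blocks, the modulated timespan 150 + (400 - 150) // 8 = 181 stays
# inside [132, 225], so the target scales by 181/150 (slightly easier).
target_timespan, actual_timespan = 150, 400
modulated = target_timespan + int((actual_timespan - target_timespan) / 8)
clamped = max(132, min(modulated, 225))
print(clamped, round(clamped / target_timespan, 3))  # 181 1.207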

View file

@ -1,10 +1,8 @@
import os
import zlib
import pylru
import base64
import asyncio
import logging
from io import StringIO
from datetime import datetime
from functools import partial
@ -13,6 +11,7 @@ from collections import namedtuple
from binascii import hexlify, unhexlify
from typing import Dict, Tuple, Type, Iterable, List, Optional
import pylru
from lbry.schema.result import Outputs
from lbry.schema.url import URL
from lbry.crypto.hash import hash160, double_sha256, sha256
@ -407,7 +406,7 @@ class Ledger(metaclass=LedgerRegistry):
"Will not continue to attempt reorganizing. Please, delete the ledger "
"synchronization directory inside your wallet directory (folder: '{}') and "
"restart the program to synchronize from scratch."
.format(rewound, self.get_id())
.format(rewound, self.get_id())
)
headers = None # ready to download some more headers
@ -686,7 +685,7 @@ class Ledger(metaclass=LedgerRegistry):
"%d change addresses (gap: %d), %d channels, %d certificates and %d claims. ",
account.id, balance, total_receiving, account.receiving.gap, total_change,
account.change.gap, channel_count, len(account.channel_keys), claim_count)
except:
except: # pylint: disable=bare-except
log.exception(
'Failed to display wallet state, please file issue '
'for this bug along with the traceback you see below:')
@ -709,7 +708,7 @@ class Ledger(metaclass=LedgerRegistry):
claim_ids = [p.purchased_claim_id for p in purchases]
try:
resolved, _, _ = await self.claim_search([], claim_ids=claim_ids)
except:
except: # pylint: disable=bare-except
log.exception("Resolve failed while looking up purchased claim ids:")
resolved = []
lookup = {claim.claim_id: claim for claim in resolved}
@ -742,7 +741,7 @@ class Ledger(metaclass=LedgerRegistry):
claim_ids = collection.claim.collection.claims.ids[offset:page_size+offset]
try:
resolve_results, _, _ = await self.claim_search([], claim_ids=claim_ids)
except:
except: # pylint: disable=bare-except
log.exception("Resolve failed while looking up collection claim ids:")
return []
claims = []
@ -777,7 +776,7 @@ class Ledger(metaclass=LedgerRegistry):
txs: List[Transaction] = await self.db.get_transactions(**constraints)
headers = self.headers
history = []
for tx in txs:
for tx in txs: # pylint: disable=too-many-nested-blocks
ts = headers[tx.height]['timestamp'] if tx.height > 0 else None
item = {
'txid': tx.id,
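
Note: several ledger.py hunks keep their catch-all handlers but add an inline # pylint: disable=bare-except comment, which suppresses W0702 for that line only. A minimal sketch of the pattern; resolve and claim_ids are hypothetical stand-ins for the claim_search call in the diff:

import logging

log = logging.getLogger(__name__)

def resolve_or_empty(resolve, claim_ids):
    # Keep the deliberate catch-all + log.exception() behaviour; the trailing
    # comment silences pylint's bare-except warning for this handler only.
    try:
        return resolve(claim_ids)
    except:  # pylint: disable=bare-except
        log.exception("Resolve failed while looking up claim ids:")
        return []

print(resolve_or_empty(lambda ids: ids, ['abc123']))  # ['abc123']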

View file

@ -1,5 +1,6 @@
import os
import json
import typing
import logging
import asyncio
from binascii import unhexlify
@ -7,7 +8,6 @@ from decimal import Decimal
from typing import List, Type, MutableSequence, MutableMapping, Optional
from lbry.error import KeyFeeAboveMaxAllowedError
from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager
from lbry.conf import Config
from .dewies import dewies_to_lbc
@ -18,6 +18,9 @@ from .database import Database
from .wallet import Wallet, WalletStorage, ENCRYPT_ON_DISK
from .rpc.jsonrpc import CodeMessageError
if typing.TYPE_CHECKING:
from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager
log = logging.getLogger(__name__)
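
Note: manager.py now imports ExchangeRateManager only under typing.TYPE_CHECKING, the usual way to keep an annotation-only import out of runtime (and a common fix for import cycles). A minimal sketch of the pattern; describe() is a hypothetical example, not code from the diff:

import typing

if typing.TYPE_CHECKING:
    # Only evaluated by static analyzers; never imported at runtime.
    from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager

def describe(manager: 'ExchangeRateManager') -> str:
    # The quoted annotation is resolved lazily, so no runtime import is needed.
    return f"using {type(manager).__name__}"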

View file

@ -3,8 +3,9 @@ import argparse
import asyncio
import aiohttp
from lbry import wallet
from lbry.wallet.orchstr8.node import (
Conductor, get_ledger_from_environment, get_blockchain_node_from_ledger
Conductor, get_blockchain_node_from_ledger
)
from lbry.wallet.orchstr8.service import ConductorService
@ -43,11 +44,10 @@ def main():
loop = asyncio.get_event_loop()
asyncio.set_event_loop(loop)
ledger = get_ledger_from_environment()
if command == 'download':
logging.getLogger('blockchain').setLevel(logging.INFO)
get_blockchain_node_from_ledger(ledger).ensure()
get_blockchain_node_from_ledger(wallet).ensure()
elif command == 'generate':
loop.run_until_complete(run_remote_command(

View file

@ -421,10 +421,10 @@ class OutputScript(Script):
OP_2DROP, OP_DROP
)
CLAIM_NAME_PUBKEY = Template('claim_name+pay_pubkey_hash', (
CLAIM_NAME_OPCODES + PAY_PUBKEY_HASH.opcodes
CLAIM_NAME_OPCODES + PAY_PUBKEY_HASH.opcodes
))
CLAIM_NAME_SCRIPT = Template('claim_name+pay_script_hash', (
CLAIM_NAME_OPCODES + PAY_SCRIPT_HASH.opcodes
CLAIM_NAME_OPCODES + PAY_SCRIPT_HASH.opcodes
))
SUPPORT_CLAIM_OPCODES = (
@ -432,10 +432,10 @@ class OutputScript(Script):
OP_2DROP, OP_DROP
)
SUPPORT_CLAIM_PUBKEY = Template('support_claim+pay_pubkey_hash', (
SUPPORT_CLAIM_OPCODES + PAY_PUBKEY_HASH.opcodes
SUPPORT_CLAIM_OPCODES + PAY_PUBKEY_HASH.opcodes
))
SUPPORT_CLAIM_SCRIPT = Template('support_claim+pay_script_hash', (
SUPPORT_CLAIM_OPCODES + PAY_SCRIPT_HASH.opcodes
SUPPORT_CLAIM_OPCODES + PAY_SCRIPT_HASH.opcodes
))
UPDATE_CLAIM_OPCODES = (
@ -443,10 +443,10 @@ class OutputScript(Script):
OP_2DROP, OP_2DROP
)
UPDATE_CLAIM_PUBKEY = Template('update_claim+pay_pubkey_hash', (
UPDATE_CLAIM_OPCODES + PAY_PUBKEY_HASH.opcodes
UPDATE_CLAIM_OPCODES + PAY_PUBKEY_HASH.opcodes
))
UPDATE_CLAIM_SCRIPT = Template('update_claim+pay_script_hash', (
UPDATE_CLAIM_OPCODES + PAY_SCRIPT_HASH.opcodes
UPDATE_CLAIM_OPCODES + PAY_SCRIPT_HASH.opcodes
))
SELL_SCRIPT = Template('sell_script', (

View file

@ -1,12 +1,11 @@
import ecdsa
import struct
import hashlib
import logging
import typing
from binascii import hexlify, unhexlify
from typing import List, Iterable, Optional, Tuple
import ecdsa
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_der_public_key
from cryptography.hazmat.primitives import hashes
@ -329,7 +328,7 @@ class Output(InputOutput):
def can_decode_claim(self):
try:
return self.claim
except:
except: # pylint: disable=bare-except
return False
@property
@ -434,8 +433,8 @@ class Output(InputOutput):
@property
def is_purchase_data(self) -> bool:
return self.script.is_return_data and (
isinstance(self.script.values['data'], Purchase) or
Purchase.has_start_byte(self.script.values['data'])
isinstance(self.script.values['data'], Purchase) or
Purchase.has_start_byte(self.script.values['data'])
)
@property
@ -450,7 +449,7 @@ class Output(InputOutput):
def can_decode_purchase_data(self):
try:
return self.purchase_data
except:
except: # pylint: disable=bare-except
return False
@property
@ -569,9 +568,9 @@ class Transaction:
def base_size(self) -> int:
""" Size of transaction without inputs or outputs in bytes. """
return (
self.size
- sum(txi.size for txi in self._inputs)
- sum(txo.size for txo in self._outputs)
self.size
- sum(txi.size for txi in self._inputs)
- sum(txo.size for txo in self._outputs)
)
@property
@ -718,8 +717,8 @@ class Transaction:
# value of the outputs plus associated fees
cost = (
tx.get_base_fee(ledger) +
tx.get_total_output_sum(ledger)
tx.get_base_fee(ledger) +
tx.get_total_output_sum(ledger)
)
# value of the inputs less the cost to spend those inputs
payment = tx.get_effective_input_sum(ledger)
@ -737,8 +736,8 @@ class Transaction:
tx.add_inputs(s.txi for s in spendables)
cost_of_change = (
tx.get_base_fee(ledger) +
Output.pay_pubkey_hash(COIN, NULL_HASH32).get_fee(ledger)
tx.get_base_fee(ledger) +
Output.pay_pubkey_hash(COIN, NULL_HASH32).get_fee(ledger)
)
if payment > cost:
change = payment - cost
@ -799,7 +798,7 @@ class Transaction:
@classmethod
def pay(cls, amount: int, address: bytes, funding_accounts: List['Account'], change_account: 'Account'):
ledger, wallet = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
output = Output.pay_pubkey_hash(amount, ledger.address_to_hash160(address))
return cls.create([], [output], funding_accounts, change_account)
@ -807,7 +806,7 @@ class Transaction:
def claim_create(
cls, name: str, claim: Claim, amount: int, holding_address: str,
funding_accounts: List['Account'], change_account: 'Account', signing_channel: Output = None):
ledger, wallet = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
claim_output = Output.pay_claim_name_pubkey_hash(
amount, name, claim, ledger.address_to_hash160(holding_address)
)
@ -819,7 +818,7 @@ class Transaction:
def claim_update(
cls, previous_claim: Output, claim: Claim, amount: int, holding_address: str,
funding_accounts: List['Account'], change_account: 'Account', signing_channel: Output = None):
ledger, wallet = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
updated_claim = Output.pay_update_claim_pubkey_hash(
amount, previous_claim.claim_name, previous_claim.claim_id,
claim, ledger.address_to_hash160(holding_address)
@ -835,7 +834,7 @@ class Transaction:
@classmethod
def support(cls, claim_name: str, claim_id: str, amount: int, holding_address: str,
funding_accounts: List['Account'], change_account: 'Account'):
ledger, wallet = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
support_output = Output.pay_support_pubkey_hash(
amount, claim_name, claim_id, ledger.address_to_hash160(holding_address)
)
@ -844,7 +843,7 @@ class Transaction:
@classmethod
def purchase(cls, claim_id: str, amount: int, merchant_address: bytes,
funding_accounts: List['Account'], change_account: 'Account'):
ledger, wallet = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
ledger, _ = cls.ensure_all_have_same_ledger_and_wallet(funding_accounts, change_account)
payment = Output.pay_pubkey_hash(amount, ledger.address_to_hash160(merchant_address))
data = Output.add_purchase_data(Purchase(claim_id))
return cls.create([], [payment, data], funding_accounts, change_account)
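
Note: the transaction.py hunks repeatedly rebind the unused wallet value to _, which is how pylint's unused-variable (W0612) warning is normally silenced when only one element of a returned pair is needed. A minimal sketch with a hypothetical stand-in for the classmethod:

def ensure_all_have_same_ledger_and_wallet():
    # Hypothetical stand-in returning a (ledger, wallet) pair as in the diff.
    return 'ledger', 'wallet'

ledger, _ = ensure_all_have_same_ledger_and_wallet()  # wallet is not needed here
print(ledger)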

View file

@ -14,7 +14,7 @@ ignore=words,server,rpc,schema
max-parents=10
max-args=10
max-line-length=120
good-names=T,t,n,i,j,k,x,y,s,f,d,h,c,e,op,db,tx,io,cachedproperty,log,id,r,iv
good-names=T,t,n,i,j,k,x,y,s,f,d,h,c,e,op,db,tx,io,cachedproperty,log,id,r,iv,ts
valid-metaclass-classmethod-first-arg=mcs
disable=
fixme,
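
Note: the .pylintrc hunk adds ts to good-names so the short timestamp local introduced in the ledger history loop does not trip invalid-name (C0103). A minimal sketch, with hypothetical stand-ins for the headers mapping and transaction height:

# The two-letter local `ts` only passes invalid-name because it is whitelisted
# in good-names; `headers` and `tx_height` are illustrative stand-ins.
headers = {5: {'timestamp': 1_580_000_000}}
tx_height = 5

ts = headers[tx_height]['timestamp'] if tx_height > 0 else None
print(ts)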