pylint and a few other fixes
parent 5b9d41fd5c
commit 25436893bc

5 changed files with 53 additions and 12 deletions

@@ -12,7 +12,6 @@ from aioupnp import __version__ as aioupnp_version
 from aioupnp.upnp import UPnP
 from aioupnp.fault import UPnPError

 import lbrynet.schema
 from lbrynet import utils
 from lbrynet.conf import HEADERS_FILE_SHA256_CHECKSUM
 from lbrynet.dht.node import Node

@@ -5,7 +5,6 @@ import json
 import time
 import inspect
 import typing
 import aiohttp
 import base58
 import random
 from urllib.parse import urlencode, quote

@@ -21,7 +20,7 @@ from lbrynet import utils
 from lbrynet.conf import Config, Setting
 from lbrynet.blob.blob_file import is_valid_blobhash
 from lbrynet.blob_exchange.downloader import download_blob
-from lbrynet.error import InsufficientFundsError, DownloadSDTimeout, ComponentsNotStarted
+from lbrynet.error import DownloadSDTimeout, ComponentsNotStarted
 from lbrynet.error import NullFundsError, NegativeFundsError, ComponentStartConditionNotMet
 from lbrynet.extras import system_info
 from lbrynet.extras.daemon import analytics

@@ -33,7 +32,7 @@ from lbrynet.extras.daemon.ComponentManager import ComponentManager
 from lbrynet.extras.daemon.json_response_encoder import JSONResponseEncoder
 from lbrynet.extras.daemon.undecorated import undecorated
 from lbrynet.wallet.transaction import Transaction, Output
-from lbrynet.wallet.account import Account as LBCAccount, validate_claim_id
+from lbrynet.wallet.account import Account as LBCAccount
 from lbrynet.wallet.dewies import dewies_to_lbc, lbc_to_dewies
 from lbrynet.schema.claim import Claim
 from lbrynet.schema.uri import parse_lbry_uri, URIParseError

@@ -2193,6 +2192,54 @@ class Daemon(metaclass=JSONRPCServerType):
         response['claims'] = sort_claim_results(response['claims'])
         return response

+    @deprecated()
+    async def jsonrpc_claim_list_by_channel(self, page=0, page_size=10, uri=None, uris=[]):
+        """
+        Get paginated claims in a channel specified by a channel uri
+        Usage:
+            claim_list_by_channel (<uri> | --uri=<uri>) [<uris>...] [--page=<page>]
+                                  [--page_size=<page_size>]
+        Options:
+            --uri=<uri>              : (str) uri of the channel
+            --uris=<uris>            : (list) uris of the channel
+            --page=<page>            : (int) which page of results to return where page 1 is the first
+                                       page, defaults to no pages
+            --page_size=<page_size>  : (int) number of results in a page, default of 10
+        Returns:
+            {
+                resolved channel uri: {
+                    If there was an error:
+                    'error': (str) error message
+                    'claims_in_channel': the total number of results for the channel,
+                    If a page of results was requested:
+                    'returned_page': page number returned,
+                    'claims_in_channel': [
+                        {
+                            'absolute_channel_position': (int) claim index number in sorted list of
+                                                         claims which assert to be part of the
+                                                         channel
+                            'address': (str) claim address,
+                            'amount': (float) claim amount,
+                            'effective_amount': (float) claim amount including supports,
+                            'claim_id': (str) claim id,
+                            'decoded_claim': (bool) whether or not the claim value was decoded,
+                            'height': (int) claim height,
+                            'depth': (int) claim depth,
+                            'has_signature': (bool) included if decoded_claim
+                            'name': (str) claim name,
+                            'supports: (list) list of supports [{'txid': (str) txid,
+                                                                 'nout': (int) nout,
+                                                                 'amount': (float) amount}],
+                            'txid': (str) claim txid,
+                            'nout': (str) claim nout,
+                            'signature_is_valid': (bool), included if has_signature,
+                            'value': ClaimDict if decoded, otherwise hex string
+                        }
+                    ],
+                }
+            }
+        """
+
+        uris = tuple(uris)
+        page = int(page)
+        page_size = int(page_size)
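
The hunk above adds claim_list_by_channel as a deprecated JSON-RPC method, with its CLI usage, options, and return shape spelled out in the docstring. As a rough illustration of how a client might call it through the daemon's HTTP JSON-RPC interface, a sketch only: the http://localhost:5279 endpoint and the "@example" channel uri are assumptions for illustration, not part of this commit.

# Sketch only: calling the deprecated claim_list_by_channel method over the
# daemon's HTTP JSON-RPC interface. The localhost:5279 endpoint and the
# "@example" channel uri are assumptions, not part of the commit.
import asyncio
import aiohttp


async def claim_list_by_channel(uri: str, page: int = 1, page_size: int = 10) -> dict:
    payload = {
        "method": "claim_list_by_channel",
        "params": {"uri": uri, "page": page, "page_size": page_size},
    }
    async with aiohttp.ClientSession() as session:
        async with session.post("http://localhost:5279", json=payload) as resp:
            body = await resp.json()
    # Per the docstring above, the result maps each resolved channel uri either
    # to an 'error' message or to 'returned_page' plus 'claims_in_channel'.
    return body["result"]


if __name__ == "__main__":
    print(asyncio.run(claim_list_by_channel("@example", page=1)))
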

@@ -1,16 +1,12 @@
 import json
 import logging
 import binascii
 import typing
 from hashlib import sha256
 from string import hexdigits

 from torba.client.baseaccount import BaseAccount
 from torba.client.basetransaction import TXORef

 if typing.TYPE_CHECKING:
     from lbrynet.wallet import ledger


 log = logging.getLogger(__name__)

@@ -25,7 +21,6 @@ def validate_claim_id(claim_id):


 class Account(BaseAccount):
     ledger: 'ledger.MainNetLedger'

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)

@@ -183,7 +178,7 @@ class Account(BaseAccount):
         return super().get_balance(confirmations, **constraints)

     @classmethod
-    def get_private_key_from_seed(cls, ledger: 'ledger.MainNetLedger', seed: str, password: str):
+    def get_private_key_from_seed(cls, ledger, seed: str, password: str):
         return super().get_private_key_from_seed(
             ledger, seed, password or 'lbryum'
         )
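
The last hunk keeps the behaviour of substituting the legacy 'lbryum' passphrase when none is supplied while changing only the signature. A minimal standalone sketch of that fallback follows; the derive_key helper is a hypothetical stand-in for the real seed derivation done by torba's BaseAccount.

# Sketch of the `password or 'lbryum'` fallback seen in Account.get_private_key_from_seed.
# derive_key is a hypothetical stand-in for the real derivation in torba's BaseAccount.
def derive_key(seed: str, passphrase: str) -> str:
    return f"key(seed={seed!r}, passphrase={passphrase!r})"


def get_private_key_from_seed(seed: str, password: str = "") -> str:
    # An empty or None password falls back to the legacy 'lbryum' passphrase.
    return derive_key(seed, password or "lbryum")


# Both calls derive the same key, because "" is falsy and replaced by "lbryum".
assert get_private_key_from_seed("twelve word test seed", "") == \
    get_private_key_from_seed("twelve word test seed", "lbryum")
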

@@ -99,7 +99,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
         # 1 LBC to which Chris readily obliges
         ramsey_account_id = (await self.daemon.jsonrpc_account_create("Ramsey"))['id']
         ramsey_address = await self.daemon.jsonrpc_address_unused(ramsey_account_id)
-        result = await self.out(self.daemon.jsonrpc_wallet_send('1.0', ramsey_address))
+        result = await self.out(self.daemon.jsonrpc_account_send('1.0', ramsey_address))
         self.assertIn("txid", result)
         await self.confirm_tx(result['txid'])

@@ -5,7 +5,7 @@ class TransactionCommandsTestCase(CommandTestCase):

     async def test_transaction_show(self):
         # local tx
-        result = await self.out(self.daemon.jsonrpc_wallet_send(
+        result = await self.out(self.daemon.jsonrpc_account_send(
             '5.0', await self.daemon.jsonrpc_address_unused(self.account.id)
         ))
         await self.confirm_tx(result['txid'])
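
Both test hunks swap the old jsonrpc_wallet_send call for jsonrpc_account_send. The same pattern, written as one hypothetical extra test method that would sit inside the same CommandTestCase subclass and reuse only helpers already visible above (self.out, self.confirm_tx, jsonrpc_address_unused), looks like this:

    # Hypothetical additional test method; assumes the surrounding CommandTestCase
    # subclass and its helpers (self.out, self.confirm_tx) shown in the hunks above.
    async def test_account_send_to_unused_address(self):
        address = await self.daemon.jsonrpc_address_unused(self.account.id)
        result = await self.out(self.daemon.jsonrpc_account_send('1.0', address))
        self.assertIn('txid', result)
        await self.confirm_tx(result['txid'])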