2020-05-01 15:33:58 +02:00
|
|
|
import json
import os
from binascii import hexlify, unhexlify
from functools import partial
from typing import Union, Callable, Optional, List

import aiohttp

from lbry.conf import Setting, NOT_SET
from lbry.blockchain.dewies import dewies_to_lbc, dict_values_to_lbc, lbc_to_dewies
from lbry.db.utils import constrain_single_or_list
from lbry.db.constants import TXO_TYPES
from lbry.service.base import Service
|
|
|
|
|
|
|
|
|
|
|
|
DEFAULT_PAGE_SIZE = 20
|
|
|
|
|
|
|
|
|
|
|
|
async def paginate_rows(get_records: Callable, get_record_count: Optional[Callable],
                        page: Optional[int], page_size: Optional[int], **constraints):
    """
    Fetch one page of rows via the supplied async accessors.

    :param get_records: async callable returning the rows matching the constraints
    :param get_record_count: optional async callable returning the total row count;
        when None, "total_pages"/"total_items" are omitted from the result
    :param page: 1-based page number (None or 0 is coerced to 1)
    :param page_size: rows per page (None or 0 is coerced to DEFAULT_PAGE_SIZE)
    :param constraints: forwarded to both accessors, with "offset"/"limit" injected
    :return: dict with "items", "page", "page_size" and, when a counter was given,
        "total_pages" and "total_items"
    """
    page = max(1, page or 1)
    page_size = max(1, page_size or DEFAULT_PAGE_SIZE)
    constraints.update({
        "offset": page_size * (page - 1),
        "limit": page_size
    })
    items = await get_records(**constraints)
    result = {"items": items, "page": page, "page_size": page_size}
    if get_record_count is not None:
        total_items = await get_record_count(**constraints)
        # Ceiling division in pure integer arithmetic; the previous
        # int((total + (size - 1)) / size) used float division, which can
        # lose precision (and thus miscount pages) for very large totals.
        result["total_pages"] = (total_items + page_size - 1) // page_size
        result["total_items"] = total_items
    return result
|
|
|
|
|
|
|
|
|
|
|
|
def paginate_list(items: List, page: Optional[int], page_size: Optional[int]):
|
|
|
|
page = max(1, page or 1)
|
|
|
|
page_size = max(1, page_size or DEFAULT_PAGE_SIZE)
|
|
|
|
total_items = len(items)
|
|
|
|
offset = page_size * (page - 1)
|
|
|
|
subitems = []
|
|
|
|
if offset <= total_items:
|
|
|
|
subitems = items[offset:offset+page_size]
|
|
|
|
return {
|
|
|
|
"items": subitems,
|
|
|
|
"total_pages": int((total_items + (page_size - 1)) / page_size),
|
|
|
|
"total_items": total_items,
|
|
|
|
"page": page, "page_size": page_size
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
class API:
|
|
|
|
|
|
|
|
    def __init__(self, service: Service):
        # The Service backend performs all real work; this class is a thin
        # RPC-facing facade over it.
        self.service = service
        # Convenience aliases into the service's wallet manager and ledger.
        self.wallets = service.wallets
        self.ledger = service.ledger
|
2020-05-01 15:33:58 +02:00
|
|
|
|
|
|
|
    async def stop(self):
        """
        Stop lbrynet API server.

        Usage:
            stop

        Options:
            None

        Returns:
            (string) Shutdown message
        """
        # Pure delegation; the service owns shutdown sequencing.
        return await self.service.stop()
|
|
|
|
|
|
|
|
    async def ffmpeg_find(self):
        """
        Get ffmpeg installation information

        Usage:
            ffmpeg_find

        Options:
            None

        Returns:
            (dict) Dictionary of ffmpeg information
            {
                'available': (bool) found ffmpeg,
                'which': (str) path to ffmpeg,
                'analyze_audio_volume': (bool) should ffmpeg analyze audio
            }
        """
        # Pure delegation; discovery logic lives in the service.
        return await self.service.find_ffmpeg()
|
|
|
|
|
|
|
|
    async def status(self):
        """
        Get daemon status

        Usage:
            status

        Options:
            None

        Returns:
            (dict) lbrynet-daemon status
            {
                'installation_id': (str) installation id - base58,
                'is_running': (bool),
                'skipped_components': (list) [names of skipped components (str)],
                'startup_status': { Does not include components which have been skipped
                    'blob_manager': (bool),
                    'blockchain_headers': (bool),
                    'database': (bool),
                    'dht': (bool),
                    'exchange_rate_manager': (bool),
                    'hash_announcer': (bool),
                    'peer_protocol_server': (bool),
                    'stream_manager': (bool),
                    'upnp': (bool),
                    'wallet': (bool),
                },
                'connection_status': {
                    'code': (str) connection status code,
                    'message': (str) connection status message
                },
                'blockchain_headers': {
                    'downloading_headers': (bool),
                    'download_progress': (float) 0-100.0
                },
                'wallet': {
                    'connected': (str) host and port of the connected spv server,
                    'blocks': (int) local blockchain height,
                    'blocks_behind': (int) remote_height - local_height,
                    'best_blockhash': (str) block hash of most recent block,
                    'is_encrypted': (bool),
                    'is_locked': (bool),
                    'connected_servers': (list) [
                        {
                            'host': (str) server hostname,
                            'port': (int) server port,
                            'latency': (int) milliseconds
                        }
                    ],
                },
                'dht': {
                    'node_id': (str) lbry dht node id - hex encoded,
                    'peers_in_routing_table': (int) the number of peers in the routing table,
                },
                'blob_manager': {
                    'finished_blobs': (int) number of finished blobs in the blob manager,
                    'connections': {
                        'incoming_bps': {
                            <source ip and tcp port>: (int) bytes per second received,
                        },
                        'outgoing_bps': {
                            <destination ip and tcp port>: (int) bytes per second sent,
                        },
                        'total_outgoing_mps': (float) megabytes per second sent,
                        'total_incoming_mps': (float) megabytes per second received,
                        'time': (float) timestamp
                    }
                },
                'hash_announcer': {
                    'announce_queue_size': (int) number of blobs currently queued to be announced
                },
                'stream_manager': {
                    'managed_files': (int) count of files in the stream manager,
                },
                'upnp': {
                    'aioupnp_version': (str),
                    'redirects': {
                        <TCP | UDP>: (int) external_port,
                    },
                    'gateway': (str) manufacturer and model,
                    'dht_redirect_set': (bool),
                    'peer_redirect_set': (bool),
                    'external_ip': (str) external ip address,
                }
            }
        """
        # Pure delegation; the service aggregates component statuses.
        return await self.service.get_status()
|
|
|
|
|
|
|
|
    async def version(self):
        """
        Get lbrynet API server version information

        Usage:
            version

        Options:
            None

        Returns:
            (dict) Dictionary of lbry version information
            {
                'processor': (str) processor type,
                'python_version': (str) python version,
                'platform': (str) platform string,
                'os_release': (str) os release string,
                'os_system': (str) os name,
                'version': (str) lbrynet version,
                'build': (str) "dev" | "qa" | "rc" | "release",
            }
        """
        # Pure delegation; version collection lives in the service.
        return await self.service.get_version()
|
|
|
|
|
|
|
|
    async def resolve(self, urls: Union[str, list], wallet_id=None, **kwargs):
        """
        Get the claim that a URL refers to.

        Usage:
            resolve <urls>... [--wallet_id=<wallet_id>]
                    [--include_purchase_receipt]
                    [--include_is_my_output]
                    [--include_sent_supports]
                    [--include_sent_tips]
                    [--include_received_tips]

        Options:
            --urls=<urls>              : (str, list) one or more urls to resolve
            --wallet_id=<wallet_id>    : (str) wallet to check for claim purchase reciepts
            --include_purchase_receipt : (bool) lookup and include a receipt if this wallet
                                         has purchased the claim being resolved
            --include_is_my_output     : (bool) lookup and include a boolean indicating
                                         if claim being resolved is yours
            --include_sent_supports    : (bool) lookup and sum the total amount
                                         of supports you've made to this claim
            --include_sent_tips        : (bool) lookup and sum the total amount
                                         of tips you've made to this claim
                                         (only makes sense when claim is not yours)
            --include_received_tips    : (bool) lookup and sum the total amount
                                         of tips you've received to this claim
                                         (only makes sense when claim is yours)

        Returns:
            Dictionary of results, keyed by url
            '<url>': {
                    If a resolution error occurs:
                    'error': Error message

                    If the url resolves to a channel or a claim in a channel:
                    'certificate': {
                        'address': (str) claim address,
                        'amount': (float) claim amount,
                        'effective_amount': (float) claim amount including supports,
                        'claim_id': (str) claim id,
                        'claim_sequence': (int) claim sequence number (or -1 if unknown),
                        'decoded_claim': (bool) whether or not the claim value was decoded,
                        'height': (int) claim height,
                        'confirmations': (int) claim depth,
                        'timestamp': (int) timestamp of the block that included this claim tx,
                        'has_signature': (bool) included if decoded_claim
                        'name': (str) claim name,
                        'permanent_url': (str) permanent url of the certificate claim,
                        'supports: (list) list of supports [{'txid': (str) txid,
                                                             'nout': (int) nout,
                                                             'amount': (float) amount}],
                        'txid': (str) claim txid,
                        'nout': (str) claim nout,
                        'signature_is_valid': (bool), included if has_signature,
                        'value': ClaimDict if decoded, otherwise hex string
                    }

                    If the url resolves to a channel:
                    'claims_in_channel': (int) number of claims in the channel,

                    If the url resolves to a claim:
                    'claim': {
                        'address': (str) claim address,
                        'amount': (float) claim amount,
                        'effective_amount': (float) claim amount including supports,
                        'claim_id': (str) claim id,
                        'claim_sequence': (int) claim sequence number (or -1 if unknown),
                        'decoded_claim': (bool) whether or not the claim value was decoded,
                        'height': (int) claim height,
                        'depth': (int) claim depth,
                        'has_signature': (bool) included if decoded_claim
                        'name': (str) claim name,
                        'permanent_url': (str) permanent url of the claim,
                        'channel_name': (str) channel name if claim is in a channel
                        'supports: (list) list of supports [{'txid': (str) txid,
                                                             'nout': (int) nout,
                                                             'amount': (float) amount}]
                        'txid': (str) claim txid,
                        'nout': (str) claim nout,
                        'signature_is_valid': (bool), included if has_signature,
                        'value': ClaimDict if decoded, otherwise hex string
                    }
            }
        """
        # Accept a bare string for convenience; the service expects a list.
        if isinstance(urls, str):
            urls = [urls]
        return await self.service.resolve(
            urls, wallet=self.wallets.get_or_default(wallet_id), **kwargs
        )
|
|
|
|
|
|
|
|
    async def get(
            self, uri, file_name=None, download_directory=None, timeout=None, save_file=None, wallet_id=None):
        """
        Download stream from a LBRY name.

        Usage:
            get <uri> [<file_name> | --file_name=<file_name>]
             [<download_directory> | --download_directory=<download_directory>] [<timeout> | --timeout=<timeout>]
             [--save_file=<save_file>] [--wallet_id=<wallet_id>]


        Options:
            --uri=<uri>              : (str) uri of the content to download
            --file_name=<file_name>  : (str) specified name for the downloaded file, overrides the stream file name
            --download_directory=<download_directory>  : (str) full path to the directory to download into
            --timeout=<timeout>      : (int) download timeout in number of seconds
            --save_file=<save_file>  : (bool) save the file to the downloads directory
            --wallet_id=<wallet_id>   : (str) wallet to check for claim purchase reciepts

        Returns: {File}
        """
        # Pure delegation; resolution and download logic live in the service.
        return await self.service.get(
            uri, file_name=file_name, download_directory=download_directory, timeout=timeout, save_file=save_file,
            wallet=self.wallets.get_or_default(wallet_id)
        )
|
|
|
|
|
|
|
|
SETTINGS_DOC = """
|
|
|
|
Settings management.
|
|
|
|
"""
|
|
|
|
|
|
|
|
    async def settings_get(self):
        """
        Get daemon settings

        Usage:
            settings_get

        Options:
            None

        Returns:
            (dict) Dictionary of daemon settings
            See ADJUSTABLE_SETTINGS in lbry/conf.py for full list of settings
        """
        # Read-only snapshot of the ledger's configuration.
        return self.service.ledger.conf.settings_dict
|
|
|
|
|
|
|
|
    async def settings_set(self, key, value):
        """
        Set daemon settings

        Usage:
            settings_set (<key>) (<value>)

        Options:
            None

        Returns:
            (dict) Updated dictionary of daemon settings
        """
        with self.service.ledger.conf.update_config() as c:
            # JSON-encoded lists/objects arrive from the CLI as strings; decode them.
            if value and isinstance(value, str) and value[0] in ('[', '{'):
                value = json.loads(value)
            # Look up the Setting descriptor on the config class and let it
            # coerce/validate the raw value before assignment.
            attr: Setting = getattr(type(c), key)
            cleaned = attr.deserialize(value)
            setattr(c, key, cleaned)
        return {key: cleaned}
|
|
|
|
|
|
|
|
    async def settings_clear(self, key):
        """
        Clear daemon settings

        Usage:
            settings_clear (<key>)

        Options:
            None

        Returns:
            (dict) Updated dictionary of daemon settings
        """
        with self.service.ledger.conf.update_config() as c:
            # NOT_SET reverts the key to its default/unset state.
            setattr(c, key, NOT_SET)
        # Return the effective value after the reset.
        return {key: self.service.ledger.conf.settings_dict[key]}
|
|
|
|
|
|
|
|
PREFERENCE_DOC = """
|
|
|
|
Preferences management.
|
|
|
|
"""
|
|
|
|
|
|
|
|
async def preference_get(self, key=None, wallet_id=None):
|
|
|
|
"""
|
|
|
|
Get preference value for key or all values if not key is passed in.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
preference_get [<key>] [--wallet_id=<wallet_id>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--key=<key> : (str) key associated with value
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
(dict) Dictionary of preference(s)
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
if key:
|
|
|
|
if key in wallet.preferences:
|
|
|
|
return {key: wallet.preferences[key]}
|
|
|
|
return
|
|
|
|
return wallet.preferences.to_dict_without_ts()
|
|
|
|
|
|
|
|
async def preference_set(self, key, value, wallet_id=None):
|
|
|
|
"""
|
|
|
|
Set preferences
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
preference_set (<key>) (<value>) [--wallet_id=<wallet_id>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--key=<key> : (str) key associated with value
|
|
|
|
--value=<key> : (str) key associated with value
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
(dict) Dictionary with key/value of new preference
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
if value and isinstance(value, str) and value[0] in ('[', '{'):
|
|
|
|
value = json.loads(value)
|
|
|
|
wallet.preferences[key] = value
|
|
|
|
wallet.save()
|
|
|
|
return {key: value}
|
|
|
|
|
|
|
|
WALLET_DOC = """
|
|
|
|
Create, modify and inspect wallets.
|
|
|
|
"""
|
|
|
|
|
|
|
|
async def wallet_list(self, wallet_id=None, page=None, page_size=None):
|
|
|
|
"""
|
|
|
|
List wallets.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
wallet_list [--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--wallet_id=<wallet_id> : (str) show specific wallet only
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
|
|
|
|
Returns: {Paginated[Wallet]}
|
|
|
|
"""
|
|
|
|
if wallet_id:
|
2020-05-06 16:53:31 +02:00
|
|
|
return paginate_list([self.wallets.get_wallet_or_error(wallet_id)], 1, 1)
|
|
|
|
return paginate_list(self.wallets.wallets, page, page_size)
|
2020-05-01 15:33:58 +02:00
|
|
|
|
|
|
|
    async def wallet_reconnect(self):
        """
        Reconnects ledger network client, applying new configurations.

        Usage:
            wallet_reconnect

        Options:

        Returns: None
        """
        # Delegates to the wallet manager's reset (re-reads config, reconnects).
        return self.wallets.reset()
|
2020-05-01 15:33:58 +02:00
|
|
|
|
|
|
|
    async def wallet_create(
            self, wallet_id, skip_on_startup=False, create_account=False, single_key=False):
        """
        Create a new wallet.

        Usage:
            wallet_create (<wallet_id> | --wallet_id=<wallet_id>) [--skip_on_startup]
                          [--create_account] [--single_key]

        Options:
            --wallet_id=<wallet_id>  : (str) wallet file name
            --skip_on_startup        : (bool) don't add wallet to daemon_settings.yml
            --create_account         : (bool) generates the default account
            --single_key             : (bool) used with --create_account, creates single-key account

        Returns: {Wallet}
        """
        # NOTE(review): `Account`, `SingleKey` and `HierarchicalDeterministic` are not in this
        # file's visible imports, and `self.conf` is never assigned in __init__ (only
        # service/wallets/ledger are) -- confirm these resolve at runtime.
        wallet_path = os.path.join(self.conf.wallet_dir, 'wallets', wallet_id)
        # Refuse to create a wallet whose id is already loaded ...
        for wallet in self.wallets.wallets:
            if wallet.id == wallet_id:
                raise Exception(f"Wallet at path '{wallet_path}' already exists and is loaded.")
        # ... or whose file already exists on disk.
        if os.path.exists(wallet_path):
            raise Exception(f"Wallet at path '{wallet_path}' already exists, use 'wallet_add' to load wallet.")
        wallet = self.wallets.import_wallet(wallet_path)
        if not wallet.accounts and create_account:
            account = Account.generate(
                self.ledger, wallet, address_generator={
                    'name': SingleKey.name if single_key else HierarchicalDeterministic.name
                }
            )
            if self.ledger.sync.network.is_connected:
                await self.ledger.subscribe_account(account)
            wallet.save()
        # Unless told otherwise, persist the wallet id into the daemon config
        # so it is loaded again on startup.
        if not skip_on_startup:
            with self.conf.update_config() as c:
                c.wallets += [wallet_id]
        return wallet
|
|
|
|
|
|
|
|
    async def wallet_add(self, wallet_id):
        """
        Add existing wallet.

        Usage:
            wallet_add (<wallet_id> | --wallet_id=<wallet_id>)

        Options:
            --wallet_id=<wallet_id>  : (str) wallet file name

        Returns: {Wallet}
        """
        # NOTE(review): `self.conf` is never assigned in __init__ (only
        # service/wallets/ledger are) -- confirm it resolves at runtime.
        wallet_path = os.path.join(self.conf.wallet_dir, 'wallets', wallet_id)
        # Guard against double-loading the same wallet id.
        for wallet in self.wallets.wallets:
            if wallet.id == wallet_id:
                raise Exception(f"Wallet at path '{wallet_path}' is already loaded.")
        if not os.path.exists(wallet_path):
            raise Exception(f"Wallet at path '{wallet_path}' was not found.")
        wallet = self.wallets.import_wallet(wallet_path)
        # Subscribe all of the wallet's accounts if the network is up.
        if self.ledger.sync.network.is_connected:
            for account in wallet.accounts:
                await self.ledger.subscribe_account(account)
        return wallet
|
|
|
|
|
|
|
|
async def wallet_remove(self, wallet_id):
|
|
|
|
"""
|
|
|
|
Remove an existing wallet.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
wallet_remove (<wallet_id> | --wallet_id=<wallet_id>)
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--wallet_id=<wallet_id> : (str) name of wallet to remove
|
|
|
|
|
|
|
|
Returns: {Wallet}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_wallet_or_error(wallet_id)
|
|
|
|
self.wallets.wallets.remove(wallet)
|
2020-05-01 15:33:58 +02:00
|
|
|
for account in wallet.accounts:
|
|
|
|
await self.ledger.unsubscribe_account(account)
|
|
|
|
return wallet
|
|
|
|
|
|
|
|
    async def wallet_balance(self, wallet_id=None, confirmations=0):
        """
        Return the balance of a wallet

        Usage:
            wallet_balance [--wallet_id=<wallet_id>] [--confirmations=<confirmations>]

        Options:
            --wallet_id=<wallet_id>         : (str) balance for specific wallet
            --confirmations=<confirmations> : (int) Only include transactions with this many
                                              confirmed blocks.

        Returns:
            (decimal) amount of lbry credits in wallet
        """
        wallet = self.wallets.get_or_default(wallet_id)
        # Sum over every account in the wallet.
        balance = await self.ledger.get_detailed_balance(
            accounts=wallet.accounts, confirmations=confirmations
        )
        # Convert dewies (int) amounts to LBC strings for the caller.
        return dict_values_to_lbc(balance)
|
|
|
|
|
|
|
|
    async def wallet_status(self, wallet_id=None):
        """
        Status of wallet including encryption/lock state.

        Usage:
            wallet_status [<wallet_id> | --wallet_id=<wallet_id>]

        Options:
            --wallet_id=<wallet_id>    : (str) status of specific wallet

        Returns:
            Dictionary of wallet status information.
        """
        # Wallet manager not initialized yet: report all-unknown.
        if self.wallets is None:
            return {'is_encrypted': None, 'is_syncing': None, 'is_locked': None}
        wallet = self.wallets.get_or_default(wallet_id)
        return {
            'is_encrypted': wallet.is_encrypted,
            # NOTE(review): reaches into the ledger's private `_update_tasks`;
            # a public "is syncing" accessor would be preferable.
            'is_syncing': len(self.ledger._update_tasks) > 0,
            'is_locked': wallet.is_locked
        }
|
|
|
|
|
|
|
|
    async def wallet_unlock(self, password, wallet_id=None):
        """
        Unlock an encrypted wallet

        Usage:
            wallet_unlock (<password> | --password=<password>) [--wallet_id=<wallet_id>]

        Options:
            --password=<password>      : (str) password to use for unlocking
            --wallet_id=<wallet_id>    : (str) restrict operation to specific wallet

        Returns:
            (bool) true if wallet is unlocked, otherwise false
        """
        # Pure delegation to the wallet's own unlock.
        return self.wallets.get_or_default(wallet_id).unlock(password)
|
2020-05-01 15:33:58 +02:00
|
|
|
|
|
|
|
    async def wallet_lock(self, wallet_id=None):
        """
        Lock an unlocked wallet

        Usage:
            wallet_lock [--wallet_id=<wallet_id>]

        Options:
            --wallet_id=<wallet_id>    : (str) restrict operation to specific wallet

        Returns:
            (bool) true if wallet is locked, otherwise false
        """
        # Pure delegation to the wallet's own lock.
        return self.wallets.get_or_default(wallet_id).lock()
|
2020-05-01 15:33:58 +02:00
|
|
|
|
|
|
|
    async def wallet_decrypt(self, wallet_id=None):
        """
        Decrypt an encrypted wallet, this will remove the wallet password. The wallet must be unlocked to decrypt it

        Usage:
            wallet_decrypt [--wallet_id=<wallet_id>]

        Options:
            --wallet_id=<wallet_id>    : (str) restrict operation to specific wallet

        Returns:
            (bool) true if wallet is decrypted, otherwise false
        """
        # Pure delegation to the wallet's own decrypt.
        return self.wallets.get_or_default(wallet_id).decrypt()
|
2020-05-01 15:33:58 +02:00
|
|
|
|
|
|
|
    async def wallet_encrypt(self, new_password, wallet_id=None):
        """
        Encrypt an unencrypted wallet with a password

        Usage:
            wallet_encrypt (<new_password> | --new_password=<new_password>)
                           [--wallet_id=<wallet_id>]

        Options:
            --new_password=<new_password>  : (str) password to encrypt account
            --wallet_id=<wallet_id>        : (str) restrict operation to specific wallet

        Returns:
            (bool) true if wallet is decrypted, otherwise false
        """
        # Pure delegation to the wallet's own encrypt.
        return self.wallets.get_or_default(wallet_id).encrypt(new_password)
|
2020-05-01 15:33:58 +02:00
|
|
|
|
|
|
|
    async def wallet_send(
            self, amount, addresses, wallet_id=None,
            change_account_id=None, funding_account_ids=None, preview=False):
        """
        Send the same number of credits to multiple addresses using all accounts in wallet to
        fund the transaction and the default account to receive any change.

        Usage:
            wallet_send <amount> <addresses>... [--wallet_id=<wallet_id>] [--preview]
                        [--change_account_id=None] [--funding_account_ids=<funding_account_ids>...]

        Options:
            --wallet_id=<wallet_id>         : (str) restrict operation to specific wallet
            --change_account_id=<wallet_id> : (str) account where change will go
            --funding_account_ids=<funding_account_ids> : (str) accounts to fund the transaction
            --preview                       : (bool) do not broadcast the transaction

        Returns: {Transaction}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        # NOTE(review): `assert` is stripped under `python -O`; an explicit raise
        # would be safer for this locked-wallet guard.
        assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
        account = wallet.accounts.get_or_default(change_account_id)
        accounts = wallet.accounts.get_or_all(funding_account_ids)
        # Convert the LBC string amount into dewies (int), raising on bad input.
        amount = self.get_dewies_or_error("amount", amount)
        # Accept a single address as a bare string.
        if addresses and not isinstance(addresses, list):
            addresses = [addresses]
        # One pay-to-pubkey-hash output per destination, all for the same amount.
        # NOTE(review): `Output`, `Transaction`, `self.valid_address_or_error`,
        # `self.get_dewies_or_error`, `self.component_manager` and
        # `self.analytics_manager` are not visible in this file/class -- confirm
        # they are provided elsewhere.
        outputs = []
        for address in addresses:
            self.valid_address_or_error(address)
            outputs.append(
                Output.pay_pubkey_hash(
                    amount, self.ledger.ledger.address_to_hash160(address)
                )
            )
        tx = await Transaction.create(
            [], outputs, accounts, account
        )
        if not preview:
            await self.ledger.broadcast(tx)
            self.component_manager.loop.create_task(self.analytics_manager.send_credits_sent())
        else:
            # Preview only: release the reserved outputs instead of broadcasting.
            await self.ledger.release_tx(tx)
        return tx
|
|
|
|
|
|
|
|
ACCOUNT_DOC = """
|
|
|
|
Create, modify and inspect wallet accounts.
|
|
|
|
"""
|
|
|
|
|
|
|
|
    async def account_list(
            self, account_id=None, wallet_id=None, confirmations=0,
            include_claims=False, show_seed=False, page=None, page_size=None):
        """
        List details of all of the accounts or a specific account.

        Usage:
            account_list [<account_id>] [--wallet_id=<wallet_id>]
                         [--confirmations=<confirmations>]
                         [--include_claims] [--show_seed]
                         [--page=<page>] [--page_size=<page_size>]

        Options:
            --account_id=<account_id>       : (str) If provided only the balance for this
                                              account will be given
            --wallet_id=<wallet_id>         : (str) accounts in specific wallet
            --confirmations=<confirmations> : (int) required confirmations (default: 0)
            --include_claims                : (bool) include claims, requires than a
                                              LBC account is specified (default: false)
            --show_seed                     : (bool) show the seed for the account
            --page=<page>                   : (int) page to return during paginating
            --page_size=<page_size>         : (int) number of items on page during pagination

        Returns: {Paginated[Account]}
        """
        # NOTE(review): `include_claims` is accepted and documented but never
        # forwarded (kwargs only carries confirmations/show_seed) -- confirm
        # whether it should be passed through.
        kwargs = {
            'confirmations': confirmations,
            'show_seed': show_seed
        }
        wallet = self.wallets.get_or_default(wallet_id)
        if account_id:
            # Single account: one-item page.
            return paginate_list([await wallet.get_account_or_error(account_id).get_details(**kwargs)], 1, 1)
        else:
            return paginate_list(await wallet.get_detailed_accounts(**kwargs), page, page_size)
|
|
|
|
|
|
|
|
    async def account_balance(self, account_id=None, wallet_id=None, confirmations=0):
        """
        Return the balance of an account

        Usage:
            account_balance [<account_id>] [<address> | --address=<address>] [--wallet_id=<wallet_id>]
                            [<confirmations> | --confirmations=<confirmations>]

        Options:
            --account_id=<account_id>       : (str) If provided only the balance for this
                                              account will be given. Otherwise default account.
            --wallet_id=<wallet_id>         : (str) balance for specific wallet
            --confirmations=<confirmations> : (int) Only include transactions with this many
                                              confirmed blocks.

        Returns:
            (decimal) amount of lbry credits in wallet
        """
        wallet = self.wallets.get_or_default(wallet_id)
        account = wallet.accounts.get_or_default(account_id)
        balance = await account.get_detailed_balance(
            confirmations=confirmations, reserved_subtotals=True,
        )
        # Convert dewies (int) amounts to LBC strings for the caller.
        return dict_values_to_lbc(balance)
|
|
|
|
|
|
|
|
    async def account_add(
            self, account_name, wallet_id=None, single_key=False,
            seed=None, private_key=None, public_key=None):
        """
        Add a previously created account from a seed, private key or public key (read-only).
        Specify --single_key for single address or vanity address accounts.

        Usage:
            account_add (<account_name> | --account_name=<account_name>)
                         (--seed=<seed> | --private_key=<private_key> | --public_key=<public_key>)
                         [--single_key] [--wallet_id=<wallet_id>]

        Options:
            --account_name=<account_name>  : (str) name of the account to add
            --seed=<seed>                  : (str) seed to generate new account from
            --private_key=<private_key>    : (str) private key for new account
            --public_key=<public_key>      : (str) public key for new account
            --single_key                   : (bool) create single key account, default is multi-key
            --wallet_id=<wallet_id>        : (str) restrict operation to specific wallet

        Returns: {Account}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        # NOTE(review): `Account`, `SingleKey` and `HierarchicalDeterministic`
        # are not in this file's visible imports -- confirm they resolve at runtime.
        account = Account.from_dict(
            self.ledger, wallet, {
                'name': account_name,
                'seed': seed,
                'private_key': private_key,
                'public_key': public_key,
                'address_generator': {
                    'name': SingleKey.name if single_key else HierarchicalDeterministic.name
                }
            }
        )
        wallet.save()
        if self.ledger.sync.network.is_connected:
            await self.ledger.subscribe_account(account)
        return account
|
|
|
|
|
|
|
|
    async def account_create(self, account_name, single_key=False, wallet_id=None):
        """
        Create a new account. Specify --single_key if you want to use
        the same address for all transactions (not recommended).

        Usage:
            account_create (<account_name> | --account_name=<account_name>)
                           [--single_key] [--wallet_id=<wallet_id>]

        Options:
            --account_name=<account_name>  : (str) name of the account to create
            --single_key                   : (bool) create single key account, default is multi-key
            --wallet_id=<wallet_id>        : (str) restrict operation to specific wallet

        Returns: {Account}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        # NOTE(review): this method uses `self.ledger.ledger` and
        # `self.ledger.sync.subscribe_account` while the sibling `account_add`
        # uses `self.ledger` and `self.ledger.subscribe_account` -- confirm
        # which access path is correct; they look inconsistent.
        account = Account.generate(
            self.ledger.ledger, wallet, account_name, {
                'name': SingleKey.name if single_key else HierarchicalDeterministic.name
            }
        )
        wallet.save()
        if self.ledger.sync.network.is_connected:
            await self.ledger.sync.subscribe_account(account)
        return account
|
|
|
|
|
|
|
|
async def account_remove(self, account_id, wallet_id=None):
|
|
|
|
"""
|
|
|
|
Remove an existing account.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
account_remove (<account_id> | --account_id=<account_id>) [--wallet_id=<wallet_id>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--account_id=<account_id> : (str) id of the account to remove
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
|
|
|
|
Returns: {Account}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
account = wallet.get_account_or_error(account_id)
|
|
|
|
wallet.accounts.remove(account)
|
|
|
|
wallet.save()
|
|
|
|
return account
|
|
|
|
|
|
|
|
async def account_set(
|
|
|
|
self, account_id, wallet_id=None, default=False, new_name=None,
|
|
|
|
change_gap=None, change_max_uses=None, receiving_gap=None, receiving_max_uses=None):
|
|
|
|
"""
|
|
|
|
Change various settings on an account.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
account_set (<account_id> | --account_id=<account_id>) [--wallet_id=<wallet_id>]
|
|
|
|
[--default] [--new_name=<new_name>]
|
|
|
|
[--change_gap=<change_gap>] [--change_max_uses=<change_max_uses>]
|
|
|
|
[--receiving_gap=<receiving_gap>] [--receiving_max_uses=<receiving_max_uses>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--account_id=<account_id> : (str) id of the account to change
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
--default : (bool) make this account the default
|
|
|
|
--new_name=<new_name> : (str) new name for the account
|
|
|
|
--receiving_gap=<receiving_gap> : (int) set the gap for receiving addresses
|
|
|
|
--receiving_max_uses=<receiving_max_uses> : (int) set the maximum number of times to
|
|
|
|
use a receiving address
|
|
|
|
--change_gap=<change_gap> : (int) set the gap for change addresses
|
|
|
|
--change_max_uses=<change_max_uses> : (int) set the maximum number of times to
|
|
|
|
use a change address
|
|
|
|
|
|
|
|
Returns: {Account}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
account = wallet.get_account_or_error(account_id)
|
|
|
|
change_made = False
|
|
|
|
|
|
|
|
if account.receiving.name == HierarchicalDeterministic.name:
|
|
|
|
address_changes = {
|
|
|
|
'change': {'gap': change_gap, 'maximum_uses_per_address': change_max_uses},
|
|
|
|
'receiving': {'gap': receiving_gap, 'maximum_uses_per_address': receiving_max_uses},
|
|
|
|
}
|
|
|
|
for chain_name in address_changes:
|
|
|
|
chain = getattr(account, chain_name)
|
|
|
|
for attr, value in address_changes[chain_name].items():
|
|
|
|
if value is not None:
|
|
|
|
setattr(chain, attr, value)
|
|
|
|
change_made = True
|
|
|
|
|
|
|
|
if new_name is not None:
|
|
|
|
account.name = new_name
|
|
|
|
change_made = True
|
|
|
|
|
|
|
|
if default and wallet.default_account != account:
|
|
|
|
wallet.accounts.remove(account)
|
|
|
|
wallet.accounts.insert(0, account)
|
|
|
|
change_made = True
|
|
|
|
|
|
|
|
if change_made:
|
|
|
|
account.modified_on = time.time()
|
|
|
|
wallet.save()
|
|
|
|
|
|
|
|
return account
|
|
|
|
|
|
|
|
async def account_max_address_gap(self, account_id, wallet_id=None):
|
|
|
|
"""
|
|
|
|
Finds ranges of consecutive addresses that are unused and returns the length
|
|
|
|
of the longest such range: for change and receiving address chains. This is
|
|
|
|
useful to figure out ideal values to set for 'receiving_gap' and 'change_gap'
|
|
|
|
account settings.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
account_max_address_gap (<account_id> | --account_id=<account_id>)
|
|
|
|
[--wallet_id=<wallet_id>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--account_id=<account_id> : (str) account for which to get max gaps
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
(map) maximum gap for change and receiving addresses
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
return wallet.get_account_or_error(account_id).get_max_gap()
|
|
|
|
|
|
|
|
async def account_fund(self, to_account=None, from_account=None, amount='0.0',
|
|
|
|
everything=False, outputs=1, broadcast=False, wallet_id=None):
|
|
|
|
"""
|
|
|
|
Transfer some amount (or --everything) to an account from another
|
|
|
|
account (can be the same account). Amounts are interpreted as LBC.
|
|
|
|
You can also spread the transfer across a number of --outputs (cannot
|
|
|
|
be used together with --everything).
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
account_fund [<to_account> | --to_account=<to_account>]
|
|
|
|
[<from_account> | --from_account=<from_account>]
|
|
|
|
(<amount> | --amount=<amount> | --everything)
|
|
|
|
[<outputs> | --outputs=<outputs>] [--wallet_id=<wallet_id>]
|
|
|
|
[--broadcast]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--to_account=<to_account> : (str) send to this account
|
|
|
|
--from_account=<from_account> : (str) spend from this account
|
|
|
|
--amount=<amount> : (str) the amount to transfer lbc
|
|
|
|
--everything : (bool) transfer everything (excluding claims), default: false.
|
|
|
|
--outputs=<outputs> : (int) split payment across many outputs, default: 1.
|
|
|
|
--wallet_id=<wallet_id> : (str) limit operation to specific wallet.
|
|
|
|
--broadcast : (bool) actually broadcast the transaction, default: false.
|
|
|
|
|
|
|
|
Returns: {Transaction}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
|
|
|
to_account = wallet.accounts.get_or_default(to_account)
|
|
|
|
from_account = wallet.accounts.get_or_default(from_account)
|
2020-05-01 15:33:58 +02:00
|
|
|
amount = self.get_dewies_or_error('amount', amount) if amount else None
|
|
|
|
if not isinstance(outputs, int):
|
|
|
|
raise ValueError("--outputs must be an integer.")
|
|
|
|
if everything and outputs > 1:
|
|
|
|
raise ValueError("Using --everything along with --outputs is not supported.")
|
|
|
|
return from_account.fund(
|
|
|
|
to_account=to_account, amount=amount, everything=everything,
|
|
|
|
outputs=outputs, broadcast=broadcast
|
|
|
|
)
|
|
|
|
|
|
|
|
async def account_send(self, amount, addresses, account_id=None, wallet_id=None, preview=False):
|
|
|
|
"""
|
|
|
|
Send the same number of credits to multiple addresses from a specific account (or default account).
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
account_send <amount> <addresses>... [--account_id=<account_id>] [--wallet_id=<wallet_id>] [--preview]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--account_id=<account_id> : (str) account to fund the transaction
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
--preview : (bool) do not broadcast the transaction
|
|
|
|
|
|
|
|
Returns: {Transaction}
|
|
|
|
"""
|
|
|
|
return self.wallet_send(
|
|
|
|
amount=amount, addresses=addresses, wallet_id=wallet_id,
|
|
|
|
change_account_id=account_id, funding_account_ids=[account_id] if account_id else [],
|
|
|
|
preview=preview
|
|
|
|
)
|
|
|
|
|
|
|
|
SYNC_DOC = """
|
|
|
|
Wallet synchronization.
|
|
|
|
"""
|
|
|
|
|
|
|
|
async def sync_hash(self, wallet_id=None):
|
|
|
|
"""
|
|
|
|
Deterministic hash of the wallet.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
sync_hash [<wallet_id> | --wallet_id=<wallet_id>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--wallet_id=<wallet_id> : (str) wallet for which to generate hash
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
(str) sha256 hash of wallet
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
return hexlify(wallet.hash).decode()
|
|
|
|
|
|
|
|
async def sync_apply(self, password, data=None, wallet_id=None, blocking=False):
|
|
|
|
"""
|
|
|
|
Apply incoming synchronization data, if provided, and return a sync hash and update wallet data.
|
|
|
|
|
|
|
|
Wallet must be unlocked to perform this operation.
|
|
|
|
|
|
|
|
If "encrypt-on-disk" preference is True and supplied password is different from local password,
|
|
|
|
or there is no local password (because local wallet was not encrypted), then the supplied password
|
|
|
|
will be used for local encryption (overwriting previous local encryption password).
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
sync_apply <password> [--data=<data>] [--wallet_id=<wallet_id>] [--blocking]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--password=<password> : (str) password to decrypt incoming and encrypt outgoing data
|
|
|
|
--data=<data> : (str) incoming sync data, if any
|
|
|
|
--wallet_id=<wallet_id> : (str) wallet being sync'ed
|
|
|
|
--blocking : (bool) wait until any new accounts have sync'ed
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
(map) sync hash and data
|
|
|
|
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
wallet_changed = False
|
|
|
|
if data is not None:
|
2020-05-06 16:53:31 +02:00
|
|
|
added_accounts = wallet.merge(self.wallets, password, data)
|
2020-05-01 15:33:58 +02:00
|
|
|
if added_accounts and self.ledger.sync.network.is_connected:
|
|
|
|
if blocking:
|
|
|
|
await asyncio.wait([
|
|
|
|
a.ledger.subscribe_account(a) for a in added_accounts
|
|
|
|
])
|
|
|
|
else:
|
|
|
|
for new_account in added_accounts:
|
|
|
|
asyncio.create_task(self.ledger.subscribe_account(new_account))
|
|
|
|
wallet_changed = True
|
|
|
|
if wallet.preferences.get(ENCRYPT_ON_DISK, False) and password != wallet.encryption_password:
|
|
|
|
wallet.encryption_password = password
|
|
|
|
wallet_changed = True
|
|
|
|
if wallet_changed:
|
|
|
|
wallet.save()
|
|
|
|
encrypted = wallet.pack(password)
|
|
|
|
return {
|
|
|
|
'hash': self.sync_hash(wallet_id),
|
|
|
|
'data': encrypted.decode()
|
|
|
|
}
|
|
|
|
|
|
|
|
ADDRESS_DOC = """
|
|
|
|
List, generate and verify addresses. Golomb-Rice coding filters for addresses.
|
|
|
|
"""
|
|
|
|
|
|
|
|
async def address_is_mine(self, address, account_id=None, wallet_id=None):
|
|
|
|
"""
|
|
|
|
Checks if an address is associated with the current wallet.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
address_is_mine (<address> | --address=<address>)
|
|
|
|
[<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--address=<address> : (str) address to check
|
|
|
|
--account_id=<account_id> : (str) id of the account to use
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
(bool) true, if address is associated with current wallet
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
|
|
|
account = wallet.accounts.get_or_default(account_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
match = await self.ledger.db.get_address(address=address, accounts=[account])
|
|
|
|
if match is not None:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
async def address_list(self, address=None, account_id=None, wallet_id=None, page=None, page_size=None):
|
|
|
|
"""
|
|
|
|
List account addresses or details of single address.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
address_list [--address=<address>] [--account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
[--page=<page>] [--page_size=<page_size>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--address=<address> : (str) just show details for single address
|
|
|
|
--account_id=<account_id> : (str) id of the account to use
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
|
|
|
|
Returns: {Paginated[Address]}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
constraints = {}
|
|
|
|
if address:
|
|
|
|
constraints['address'] = address
|
|
|
|
if account_id:
|
|
|
|
constraints['accounts'] = [wallet.get_account_or_error(account_id)]
|
|
|
|
else:
|
|
|
|
constraints['accounts'] = wallet.accounts
|
|
|
|
return paginate_rows(
|
|
|
|
self.ledger.get_addresses,
|
|
|
|
self.ledger.get_address_count,
|
|
|
|
page, page_size, **constraints
|
|
|
|
)
|
|
|
|
|
|
|
|
async def address_unused(self, account_id=None, wallet_id=None):
|
|
|
|
"""
|
|
|
|
Return an address containing no balance, will create
|
|
|
|
a new address if there is none.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
address_unused [--account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--account_id=<account_id> : (str) id of the account to use
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
|
|
|
|
Returns: {Address}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
|
|
|
return wallet.accounts.get_or_default(account_id).receiving.get_or_create_usable_address()
|
2020-05-01 15:33:58 +02:00
|
|
|
|
|
|
|
async def address_block_filters(self):
|
|
|
|
return await self.service.get_block_address_filters()
|
|
|
|
|
|
|
|
async def address_transaction_filters(self, block_hash):
|
|
|
|
return await self.service.get_transaction_address_filters(block_hash)
|
|
|
|
|
|
|
|
FILE_DOC = """
|
|
|
|
File management.
|
|
|
|
"""
|
|
|
|
|
|
|
|
async def file_list(
|
|
|
|
self, sort=None, reverse=False, comparison=None,
|
|
|
|
wallet_id=None, page=None, page_size=None, **kwargs):
|
|
|
|
"""
|
|
|
|
List files limited by optional filters
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
|
|
|
|
[--rowid=<rowid>] [--added_on=<added_on>] [--claim_id=<claim_id>]
|
|
|
|
[--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>]
|
|
|
|
[--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>]
|
|
|
|
[--claim_name=<claim_name>] [--blobs_in_stream=<blobs_in_stream>]
|
|
|
|
[--blobs_remaining=<blobs_remaining>] [--sort=<sort_by>]
|
|
|
|
[--comparison=<comparison>] [--full_status=<full_status>] [--reverse]
|
|
|
|
[--page=<page>] [--page_size=<page_size>] [--wallet_id=<wallet_id>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--sd_hash=<sd_hash> : (str) get file with matching sd hash
|
|
|
|
--file_name=<file_name> : (str) get file with matching file name in the
|
|
|
|
downloads folder
|
|
|
|
--stream_hash=<stream_hash> : (str) get file with matching stream hash
|
|
|
|
--rowid=<rowid> : (int) get file with matching row id
|
|
|
|
--added_on=<added_on> : (int) get file with matching time of insertion
|
|
|
|
--claim_id=<claim_id> : (str) get file with matching claim id
|
|
|
|
--outpoint=<outpoint> : (str) get file with matching claim outpoint
|
|
|
|
--txid=<txid> : (str) get file with matching claim txid
|
|
|
|
--nout=<nout> : (int) get file with matching claim nout
|
|
|
|
--channel_claim_id=<channel_claim_id> : (str) get file with matching channel claim id
|
|
|
|
--channel_name=<channel_name> : (str) get file with matching channel name
|
|
|
|
--claim_name=<claim_name> : (str) get file with matching claim name
|
|
|
|
--blobs_in_stream<blobs_in_stream> : (int) get file with matching blobs in stream
|
|
|
|
--blobs_remaining=<blobs_remaining> : (int) amount of remaining blobs to download
|
|
|
|
--sort=<sort_by> : (str) field to sort by (one of the above filter fields)
|
|
|
|
--comparison=<comparison> : (str) logical comparison, (eq | ne | g | ge | l | le)
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
--wallet_id=<wallet_id> : (str) add purchase receipts from this wallet
|
|
|
|
|
|
|
|
Returns: {Paginated[File]}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
sort = sort or 'rowid'
|
|
|
|
comparison = comparison or 'eq'
|
|
|
|
paginated = paginate_list(
|
|
|
|
self.stream_manager.get_filtered_streams(sort, reverse, comparison, **kwargs), page, page_size
|
|
|
|
)
|
|
|
|
if paginated['items']:
|
|
|
|
receipts = {
|
|
|
|
txo.purchased_claim_id: txo for txo in
|
|
|
|
await self.ledger.db.get_purchases(
|
|
|
|
accounts=wallet.accounts,
|
|
|
|
purchased_claim_hash__in=[unhexlify(s.claim_id)[::-1] for s in paginated['items']]
|
|
|
|
)
|
|
|
|
}
|
|
|
|
for stream in paginated['items']:
|
|
|
|
stream.purchase_receipt = receipts.get(stream.claim_id)
|
|
|
|
return paginated
|
|
|
|
|
|
|
|
async def file_set_status(self, status, **kwargs):
|
|
|
|
"""
|
|
|
|
Start or stop downloading a file
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
file_set_status (<status> | --status=<status>) [--sd_hash=<sd_hash>]
|
|
|
|
[--file_name=<file_name>] [--stream_hash=<stream_hash>] [--rowid=<rowid>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--status=<status> : (str) one of "start" or "stop"
|
|
|
|
--sd_hash=<sd_hash> : (str) set status of file with matching sd hash
|
|
|
|
--file_name=<file_name> : (str) set status of file with matching file name in the
|
|
|
|
downloads folder
|
|
|
|
--stream_hash=<stream_hash> : (str) set status of file with matching stream hash
|
|
|
|
--rowid=<rowid> : (int) set status of file with matching row id
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
(str) Confirmation message
|
|
|
|
"""
|
|
|
|
|
|
|
|
if status not in ['start', 'stop']:
|
|
|
|
raise Exception('Status must be "start" or "stop".')
|
|
|
|
|
|
|
|
streams = self.stream_manager.get_filtered_streams(**kwargs)
|
|
|
|
if not streams:
|
|
|
|
raise Exception(f'Unable to find a file for {kwargs}')
|
|
|
|
stream = streams[0]
|
|
|
|
if status == 'start' and not stream.running:
|
|
|
|
await stream.save_file(node=self.stream_manager.node)
|
|
|
|
msg = "Resumed download"
|
|
|
|
elif status == 'stop' and stream.running:
|
|
|
|
await stream.stop()
|
|
|
|
msg = "Stopped download"
|
|
|
|
else:
|
|
|
|
msg = (
|
|
|
|
"File was already being downloaded" if status == 'start'
|
|
|
|
else "File was already stopped"
|
|
|
|
)
|
|
|
|
return msg
|
|
|
|
|
|
|
|
async def file_delete(self, delete_from_download_dir=False, delete_all=False, **kwargs):
|
|
|
|
"""
|
|
|
|
Delete a LBRY file
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
file_delete [--delete_from_download_dir] [--delete_all] [--sd_hash=<sd_hash>] [--file_name=<file_name>]
|
|
|
|
[--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--txid=<txid>]
|
|
|
|
[--nout=<nout>] [--claim_name=<claim_name>] [--channel_claim_id=<channel_claim_id>]
|
|
|
|
[--channel_name=<channel_name>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--delete_from_download_dir : (bool) delete file from download directory,
|
|
|
|
instead of just deleting blobs
|
|
|
|
--delete_all : (bool) if there are multiple matching files,
|
|
|
|
allow the deletion of multiple files.
|
|
|
|
Otherwise do not delete anything.
|
|
|
|
--sd_hash=<sd_hash> : (str) delete by file sd hash
|
|
|
|
--file_name=<file_name> : (str) delete by file name in downloads folder
|
|
|
|
--stream_hash=<stream_hash> : (str) delete by file stream hash
|
|
|
|
--rowid=<rowid> : (int) delete by file row id
|
|
|
|
--claim_id=<claim_id> : (str) delete by file claim id
|
|
|
|
--txid=<txid> : (str) delete by file claim txid
|
|
|
|
--nout=<nout> : (int) delete by file claim nout
|
|
|
|
--claim_name=<claim_name> : (str) delete by file claim name
|
|
|
|
--channel_claim_id=<channel_claim_id> : (str) delete by file channel claim id
|
|
|
|
--channel_name=<channel_name> : (str) delete by file channel claim name
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
(bool) true if deletion was successful
|
|
|
|
"""
|
|
|
|
|
|
|
|
streams = self.stream_manager.get_filtered_streams(**kwargs)
|
|
|
|
|
|
|
|
if len(streams) > 1:
|
|
|
|
if not delete_all:
|
|
|
|
log.warning("There are %i files to delete, use narrower filters to select one",
|
|
|
|
len(streams))
|
|
|
|
return False
|
|
|
|
else:
|
|
|
|
log.warning("Deleting %i files",
|
|
|
|
len(streams))
|
|
|
|
|
|
|
|
if not streams:
|
|
|
|
log.warning("There is no file to delete")
|
|
|
|
return False
|
|
|
|
else:
|
|
|
|
for stream in streams:
|
|
|
|
message = f"Deleted file {stream.file_name}"
|
|
|
|
await self.stream_manager.delete_stream(stream, delete_file=delete_from_download_dir)
|
|
|
|
log.info(message)
|
|
|
|
result = True
|
|
|
|
return result
|
|
|
|
|
|
|
|
async def file_save(self, file_name=None, download_directory=None, **kwargs):
|
|
|
|
"""
|
|
|
|
Start saving a file to disk.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
file_save [--file_name=<file_name>] [--download_directory=<download_directory>] [--sd_hash=<sd_hash>]
|
|
|
|
[--stream_hash=<stream_hash>] [--rowid=<rowid>] [--claim_id=<claim_id>] [--txid=<txid>]
|
|
|
|
[--nout=<nout>] [--claim_name=<claim_name>] [--channel_claim_id=<channel_claim_id>]
|
|
|
|
[--channel_name=<channel_name>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--file_name=<file_name> : (str) file name to save to
|
|
|
|
--download_directory=<download_directory> : (str) directory to save into
|
|
|
|
--sd_hash=<sd_hash> : (str) save file with matching sd hash
|
|
|
|
--stream_hash=<stream_hash> : (str) save file with matching stream hash
|
|
|
|
--rowid=<rowid> : (int) save file with matching row id
|
|
|
|
--claim_id=<claim_id> : (str) save file with matching claim id
|
|
|
|
--txid=<txid> : (str) save file with matching claim txid
|
|
|
|
--nout=<nout> : (int) save file with matching claim nout
|
|
|
|
--claim_name=<claim_name> : (str) save file with matching claim name
|
|
|
|
--channel_claim_id=<channel_claim_id> : (str) save file with matching channel claim id
|
|
|
|
--channel_name=<channel_name> : (str) save file with matching channel claim name
|
|
|
|
|
|
|
|
Returns: {File}
|
|
|
|
"""
|
|
|
|
|
|
|
|
streams = self.stream_manager.get_filtered_streams(**kwargs)
|
|
|
|
|
|
|
|
if len(streams) > 1:
|
|
|
|
log.warning("There are %i matching files, use narrower filters to select one", len(streams))
|
|
|
|
return False
|
|
|
|
if not streams:
|
|
|
|
log.warning("There is no file to save")
|
|
|
|
return False
|
|
|
|
stream = streams[0]
|
|
|
|
await stream.save_file(file_name, download_directory)
|
|
|
|
return stream
|
|
|
|
|
|
|
|
PURCHASE_DOC = """
|
|
|
|
List and make purchases of claims.
|
|
|
|
"""
|
|
|
|
|
|
|
|
async def purchase_list(
|
|
|
|
self, claim_id=None, resolve=False, account_id=None, wallet_id=None, page=None, page_size=None):
|
|
|
|
"""
|
|
|
|
List my claim purchases.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
purchase_list [<claim_id> | --claim_id=<claim_id>] [--resolve]
|
|
|
|
[--account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
[--page=<page>] [--page_size=<page_size>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--claim_id=<claim_id> : (str) purchases for specific claim
|
|
|
|
--resolve : (str) include resolved claim information
|
|
|
|
--account_id=<account_id> : (str) id of the account to query
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
|
|
|
|
Returns: {Paginated[Output]}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
constraints = {
|
|
|
|
"wallet": wallet,
|
|
|
|
"accounts": [wallet.get_account_or_error(account_id)] if account_id else wallet.accounts,
|
|
|
|
"resolve": resolve,
|
|
|
|
}
|
|
|
|
if claim_id:
|
|
|
|
constraints["purchased_claim_id"] = claim_id
|
|
|
|
return paginate_rows(
|
|
|
|
self.ledger.get_purchases,
|
|
|
|
self.ledger.get_purchase_count,
|
|
|
|
page, page_size, **constraints
|
|
|
|
)
|
|
|
|
|
|
|
|
async def purchase_create(
|
|
|
|
self, claim_id=None, url=None, wallet_id=None, funding_account_ids=None,
|
|
|
|
allow_duplicate_purchase=False, override_max_key_fee=False, preview=False, blocking=False):
|
|
|
|
"""
|
|
|
|
Purchase a claim.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
purchase_create (--claim_id=<claim_id> | --url=<url>) [--wallet_id=<wallet_id>]
|
|
|
|
[--funding_account_ids=<funding_account_ids>...]
|
|
|
|
[--allow_duplicate_purchase] [--override_max_key_fee] [--preview] [--blocking]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--claim_id=<claim_id> : (str) claim id of claim to purchase
|
|
|
|
--url=<url> : (str) lookup claim to purchase by url
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
--funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
|
|
|
|
--allow_duplicate_purchase : (bool) allow purchasing claim_id you already own
|
|
|
|
--override_max_key_fee : (bool) ignore max key fee for this purchase
|
|
|
|
--preview : (bool) do not broadcast the transaction
|
|
|
|
--blocking : (bool) wait until transaction is in mempool
|
|
|
|
|
|
|
|
Returns: {Transaction}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
|
2020-05-06 16:53:31 +02:00
|
|
|
accounts = wallet.accounts.get_or_all(funding_account_ids)
|
2020-05-01 15:33:58 +02:00
|
|
|
txo = None
|
|
|
|
if claim_id:
|
|
|
|
txo = await self.ledger.get_claim_by_claim_id(accounts, claim_id, include_purchase_receipt=True)
|
|
|
|
if not isinstance(txo, Output) or not txo.is_claim:
|
|
|
|
raise Exception(f"Could not find claim with claim_id '{claim_id}'. ")
|
|
|
|
elif url:
|
|
|
|
txo = (await self.ledger.resolve(accounts, [url], include_purchase_receipt=True))[url]
|
|
|
|
if not isinstance(txo, Output) or not txo.is_claim:
|
|
|
|
raise Exception(f"Could not find claim with url '{url}'. ")
|
|
|
|
else:
|
|
|
|
raise Exception(f"Missing argument claim_id or url. ")
|
|
|
|
if not allow_duplicate_purchase and txo.purchase_receipt:
|
|
|
|
raise Exception(
|
|
|
|
f"You already have a purchase for claim_id '{claim_id}'. "
|
|
|
|
f"Use --allow-duplicate-purchase flag to override."
|
|
|
|
)
|
|
|
|
claim = txo.claim
|
|
|
|
if not claim.is_stream or not claim.stream.has_fee:
|
|
|
|
raise Exception(f"Claim '{claim_id}' does not have a purchase price.")
|
2020-05-06 16:53:31 +02:00
|
|
|
tx = await self.wallets.create_purchase_transaction(
|
2020-05-01 15:33:58 +02:00
|
|
|
accounts, txo, self.exchange_rate_manager, override_max_key_fee
|
|
|
|
)
|
|
|
|
if not preview:
|
|
|
|
await self.broadcast_or_release(tx, blocking)
|
|
|
|
else:
|
|
|
|
await self.ledger.release_tx(tx)
|
|
|
|
return tx
|
|
|
|
|
|
|
|
CLAIM_DOC = """
|
|
|
|
List and search all types of claims.
|
|
|
|
"""
|
|
|
|
|
|
|
|
async def claim_list(self, claim_type=None, **kwargs):
|
|
|
|
"""
|
|
|
|
List my stream and channel claims.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
claim_list [--claim_type=<claim_type>...] [--claim_id=<claim_id>...] [--name=<name>...] [--is_spent]
|
|
|
|
[--channel_id=<channel_id>...] [--account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
[--page=<page>] [--page_size=<page_size>]
|
|
|
|
[--resolve] [--order_by=<order_by>] [--no_totals] [--include_received_tips]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--claim_type=<claim_type> : (str or list) claim type: channel, stream, repost, collection
|
|
|
|
--claim_id=<claim_id> : (str or list) claim id
|
|
|
|
--channel_id=<channel_id> : (str or list) streams in this channel
|
|
|
|
--name=<name> : (str or list) claim name
|
|
|
|
--is_spent : (bool) shows previous claim updates and abandons
|
|
|
|
--account_id=<account_id> : (str) id of the account to query
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
--resolve : (bool) resolves each claim to provide additional metadata
|
|
|
|
--order_by=<order_by> : (str) field to order by: 'name', 'height', 'amount'
|
|
|
|
--no_totals : (bool) do not calculate the total number of pages and items in result set
|
|
|
|
(significant performance boost)
|
|
|
|
--include_received_tips : (bool) calculate the amount of tips recieved for claim outputs
|
|
|
|
|
|
|
|
Returns: {Paginated[Output]}
|
|
|
|
"""
|
|
|
|
kwargs['type'] = claim_type or CLAIM_TYPE_NAMES
|
|
|
|
if 'is_spent' not in kwargs:
|
|
|
|
kwargs['is_not_spent'] = True
|
|
|
|
return self.txo_list(**kwargs)
|
|
|
|
|
|
|
|
async def claim_search(self, **kwargs):
|
|
|
|
"""
|
|
|
|
Search for stream and channel claims on the blockchain.
|
|
|
|
|
|
|
|
Arguments marked with "supports equality constraints" allow prepending the
|
|
|
|
value with an equality constraint such as '>', '>=', '<' and '<='
|
|
|
|
eg. --height=">400000" would limit results to only claims above 400k block height.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
claim_search [<name> | --name=<name>] [--text=<text>] [--txid=<txid>] [--nout=<nout>]
|
|
|
|
[--claim_id=<claim_id> | --claim_ids=<claim_ids>...]
|
|
|
|
[--channel=<channel> |
|
|
|
|
[[--channel_ids=<channel_ids>...] [--not_channel_ids=<not_channel_ids>...]]]
|
|
|
|
[--has_channel_signature] [--valid_channel_signature | --invalid_channel_signature]
|
|
|
|
[--is_controlling] [--release_time=<release_time>] [--public_key_id=<public_key_id>]
|
|
|
|
[--timestamp=<timestamp>] [--creation_timestamp=<creation_timestamp>]
|
|
|
|
[--height=<height>] [--creation_height=<creation_height>]
|
|
|
|
[--activation_height=<activation_height>] [--expiration_height=<expiration_height>]
|
|
|
|
[--amount=<amount>] [--effective_amount=<effective_amount>]
|
|
|
|
[--support_amount=<support_amount>] [--trending_group=<trending_group>]
|
|
|
|
[--trending_mixed=<trending_mixed>] [--trending_local=<trending_local>]
|
|
|
|
[--trending_global=<trending_global]
|
|
|
|
[--reposted_claim_id=<reposted_claim_id>] [--reposted=<reposted>]
|
|
|
|
[--claim_type=<claim_type>] [--stream_types=<stream_types>...] [--media_types=<media_types>...]
|
|
|
|
[--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>]
|
|
|
|
[--duration=<duration>]
|
|
|
|
[--any_tags=<any_tags>...] [--all_tags=<all_tags>...] [--not_tags=<not_tags>...]
|
|
|
|
[--any_languages=<any_languages>...] [--all_languages=<all_languages>...]
|
|
|
|
[--not_languages=<not_languages>...]
|
|
|
|
[--any_locations=<any_locations>...] [--all_locations=<all_locations>...]
|
|
|
|
[--not_locations=<not_locations>...]
|
|
|
|
[--order_by=<order_by>...] [--page=<page>] [--page_size=<page_size>]
|
|
|
|
[--wallet_id=<wallet_id>] [--include_purchase_receipt] [--include_is_my_output]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--name=<name> : (str) claim name (normalized)
|
|
|
|
--text=<text> : (str) full text search
|
|
|
|
--claim_id=<claim_id> : (str) full or partial claim id
|
|
|
|
--claim_ids=<claim_ids> : (list) list of full claim ids
|
|
|
|
--txid=<txid> : (str) transaction id
|
|
|
|
--nout=<nout> : (str) position in the transaction
|
|
|
|
--channel=<channel> : (str) claims signed by this channel (argument is
|
|
|
|
a URL which automatically gets resolved),
|
|
|
|
see --channel_ids if you need to filter by
|
|
|
|
multiple channels at the same time,
|
|
|
|
includes claims with invalid signatures,
|
|
|
|
use in conjunction with --valid_channel_signature
|
|
|
|
--channel_ids=<channel_ids> : (list) claims signed by any of these channels
|
|
|
|
(arguments must be claim ids of the channels),
|
|
|
|
includes claims with invalid signatures,
|
|
|
|
implies --has_channel_signature,
|
|
|
|
use in conjunction with --valid_channel_signature
|
|
|
|
--not_channel_ids=<not_channel_ids>: (list) exclude claims signed by any of these channels
|
|
|
|
(arguments must be claim ids of the channels)
|
|
|
|
--has_channel_signature : (bool) claims with a channel signature (valid or invalid)
|
|
|
|
--valid_channel_signature : (bool) claims with a valid channel signature or no signature,
|
|
|
|
use in conjunction with --has_channel_signature to
|
|
|
|
only get claims with valid signatures
|
|
|
|
--invalid_channel_signature : (bool) claims with invalid channel signature or no signature,
|
|
|
|
use in conjunction with --has_channel_signature to
|
|
|
|
only get claims with invalid signatures
|
|
|
|
--is_controlling : (bool) winning claims of their respective name
|
|
|
|
--public_key_id=<public_key_id> : (str) only return channels having this public key id, this is
|
|
|
|
the same key as used in the wallet file to map
|
|
|
|
channel certificate private keys: {'public_key_id': 'private key'}
|
|
|
|
--height=<height> : (int) last updated block height (supports equality constraints)
|
|
|
|
--timestamp=<timestamp> : (int) last updated timestamp (supports equality constraints)
|
|
|
|
--creation_height=<creation_height> : (int) created at block height (supports equality constraints)
|
|
|
|
--creation_timestamp=<creation_timestamp>: (int) created at timestamp (supports equality constraints)
|
|
|
|
--activation_height=<activation_height> : (int) height at which claim starts competing for name
|
|
|
|
(supports equality constraints)
|
|
|
|
--expiration_height=<expiration_height> : (int) height at which claim will expire
|
|
|
|
(supports equality constraints)
|
|
|
|
--release_time=<release_time> : (int) limit to claims self-described as having been
|
|
|
|
released to the public on or after this UTC
|
|
|
|
timestamp, when claim does not provide
|
|
|
|
a release time the publish time is used instead
|
|
|
|
(supports equality constraints)
|
|
|
|
--amount=<amount> : (int) limit by claim value (supports equality constraints)
|
|
|
|
--support_amount=<support_amount>: (int) limit by supports and tips received (supports
|
|
|
|
equality constraints)
|
|
|
|
--effective_amount=<effective_amount>: (int) limit by total value (initial claim value plus
|
|
|
|
all tips and supports received), this amount is
|
|
|
|
blank until claim has reached activation height
|
|
|
|
(supports equality constraints)
|
|
|
|
--trending_group=<trending_group>: (int) group numbers 1 through 4 representing the
|
|
|
|
trending groups of the content: 4 means
|
|
|
|
content is trending globally and independently,
|
|
|
|
3 means content is not trending globally but is
|
|
|
|
trending independently (locally), 2 means it is
|
|
|
|
trending globally but not independently and 1
|
|
|
|
means it's not trending globally or locally
|
|
|
|
(supports equality constraints)
|
|
|
|
--trending_mixed=<trending_mixed>: (int) trending amount taken from the global or local
|
|
|
|
value depending on the trending group:
|
|
|
|
4 - global value, 3 - local value, 2 - global
|
|
|
|
value, 1 - local value (supports equality
|
|
|
|
constraints)
|
|
|
|
--trending_local=<trending_local>: (int) trending value calculated relative only to
|
|
|
|
the individual contents past history (supports
|
|
|
|
equality constraints)
|
|
|
|
--trending_global=<trending_global>: (int) trending value calculated relative to all
|
|
|
|
trending content globally (supports
|
|
|
|
equality constraints)
|
|
|
|
--reposted_claim_id=<reposted_claim_id>: (str) all reposts of the specified original claim id
|
|
|
|
--reposted=<reposted> : (int) claims reposted this many times (supports
|
|
|
|
equality constraints)
|
|
|
|
--claim_type=<claim_type> : (str) filter by 'channel', 'stream' or 'unknown'
|
|
|
|
--stream_types=<stream_types> : (list) filter by 'video', 'image', 'document', etc
|
|
|
|
--media_types=<media_types> : (list) filter by 'video/mp4', 'image/png', etc
|
|
|
|
--fee_currency=<fee_currency> : (string) specify fee currency: LBC, BTC, USD
|
|
|
|
--fee_amount=<fee_amount> : (decimal) content download fee (supports equality constraints)
|
|
|
|
--duration=<duration> : (int) duration of video or audio in seconds
|
|
|
|
(supports equality constraints)
|
|
|
|
--any_tags=<any_tags> : (list) find claims containing any of the tags
|
|
|
|
--all_tags=<all_tags> : (list) find claims containing every tag
|
|
|
|
--not_tags=<not_tags> : (list) find claims not containing any of these tags
|
|
|
|
--any_languages=<any_languages> : (list) find claims containing any of the languages
|
|
|
|
--all_languages=<all_languages> : (list) find claims containing every language
|
|
|
|
--not_languages=<not_languages> : (list) find claims not containing any of these languages
|
|
|
|
--any_locations=<any_locations> : (list) find claims containing any of the locations
|
|
|
|
--all_locations=<all_locations> : (list) find claims containing every location
|
|
|
|
--not_locations=<not_locations> : (list) find claims not containing any of these locations
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
--order_by=<order_by> : (list) field to order by, default is descending order, to do an
|
|
|
|
ascending order prepend ^ to the field name, eg. '^amount'
|
|
|
|
available fields: 'name', 'height', 'release_time',
|
|
|
|
'publish_time', 'amount', 'effective_amount',
|
|
|
|
'support_amount', 'trending_group', 'trending_mixed',
|
|
|
|
'trending_local', 'trending_global', 'activation_height'
|
|
|
|
--no_totals : (bool) do not calculate the total number of pages and items in result set
|
|
|
|
(significant performance boost)
|
|
|
|
--wallet_id=<wallet_id> : (str) wallet to check for claim purchase reciepts
|
|
|
|
--include_purchase_receipt : (bool) lookup and include a receipt if this wallet
|
|
|
|
has purchased the claim
|
|
|
|
--include_is_my_output : (bool) lookup and include a boolean indicating
|
|
|
|
if claim being resolved is yours
|
|
|
|
|
|
|
|
Returns: {Paginated[Output]}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(kwargs.pop('wallet_id', None))
|
2020-05-01 15:33:58 +02:00
|
|
|
if {'claim_id', 'claim_ids'}.issubset(kwargs):
|
|
|
|
raise ValueError("Only 'claim_id' or 'claim_ids' is allowed, not both.")
|
|
|
|
if kwargs.pop('valid_channel_signature', False):
|
|
|
|
kwargs['signature_valid'] = 1
|
|
|
|
if kwargs.pop('invalid_channel_signature', False):
|
|
|
|
kwargs['signature_valid'] = 0
|
|
|
|
page_num, page_size = abs(kwargs.pop('page', 1)), min(abs(kwargs.pop('page_size', DEFAULT_PAGE_SIZE)), 50)
|
|
|
|
kwargs.update({'offset': page_size * (page_num - 1), 'limit': page_size})
|
2020-05-02 05:25:07 +02:00
|
|
|
txos, total, censored = await self.service.search_claims(wallet.accounts, **kwargs)
|
2020-05-01 15:33:58 +02:00
|
|
|
result = {
|
|
|
|
"items": txos,
|
2020-05-02 05:25:07 +02:00
|
|
|
#"blocked": censored,
|
2020-05-01 15:33:58 +02:00
|
|
|
"page": page_num,
|
|
|
|
"page_size": page_size
|
|
|
|
}
|
|
|
|
if not kwargs.pop('no_totals', False):
|
|
|
|
result['total_pages'] = int((total + (page_size - 1)) / page_size)
|
|
|
|
result['total_items'] = total
|
|
|
|
return result
|
|
|
|
|
|
|
|
CHANNEL_DOC = """
|
|
|
|
Create, update, abandon and list your channel claims.
|
|
|
|
"""
|
|
|
|
|
|
|
|
async def channel_create(
|
|
|
|
self, name, bid, allow_duplicate_name=False, account_id=None, wallet_id=None,
|
|
|
|
claim_address=None, funding_account_ids=None, preview=False, blocking=False, **kwargs):
|
|
|
|
"""
|
|
|
|
Create a new channel by generating a channel private key and establishing an '@' prefixed claim.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
channel_create (<name> | --name=<name>) (<bid> | --bid=<bid>)
|
|
|
|
[--allow_duplicate_name=<allow_duplicate_name>]
|
|
|
|
[--title=<title>] [--description=<description>] [--email=<email>]
|
|
|
|
[--website_url=<website_url>] [--featured=<featured>...]
|
|
|
|
[--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
|
|
|
|
[--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
|
|
|
|
[--account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
[--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
|
|
|
|
[--preview] [--blocking]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--name=<name> : (str) name of the channel prefixed with '@'
|
|
|
|
--bid=<bid> : (decimal) amount to back the claim
|
|
|
|
--allow_duplicate_name=<allow_duplicate_name> : (bool) create new channel even if one already exists with
|
|
|
|
given name. default: false.
|
|
|
|
--title=<title> : (str) title of the publication
|
|
|
|
--description=<description> : (str) description of the publication
|
|
|
|
--email=<email> : (str) email of channel owner
|
|
|
|
--website_url=<website_url> : (str) website url
|
|
|
|
--featured=<featured> : (list) claim_ids of featured content in channel
|
|
|
|
--tags=<tags> : (list) content tags
|
|
|
|
--languages=<languages> : (list) languages used by the channel,
|
|
|
|
using RFC 5646 format, eg:
|
|
|
|
for English `--languages=en`
|
|
|
|
for Spanish (Spain) `--languages=es-ES`
|
|
|
|
for Spanish (Mexican) `--languages=es-MX`
|
|
|
|
for Chinese (Simplified) `--languages=zh-Hans`
|
|
|
|
for Chinese (Traditional) `--languages=zh-Hant`
|
|
|
|
--locations=<locations> : (list) locations of the channel, consisting of 2 letter
|
|
|
|
`country` code and a `state`, `city` and a postal
|
|
|
|
`code` along with a `latitude` and `longitude`.
|
|
|
|
for JSON RPC: pass a dictionary with aforementioned
|
|
|
|
attributes as keys, eg:
|
|
|
|
...
|
|
|
|
"locations": [{'country': 'US', 'state': 'NH'}]
|
|
|
|
...
|
|
|
|
for command line: pass a colon delimited list
|
|
|
|
with values in the following order:
|
|
|
|
|
|
|
|
"COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"
|
|
|
|
|
|
|
|
making sure to include colon for blank values, for
|
|
|
|
example to provide only the city:
|
|
|
|
|
|
|
|
... --locations="::Manchester"
|
|
|
|
|
|
|
|
with all values set:
|
|
|
|
|
|
|
|
... --locations="US:NH:Manchester:03101:42.990605:-71.460989"
|
|
|
|
|
|
|
|
optionally, you can just pass the "LATITUDE:LONGITUDE":
|
|
|
|
|
|
|
|
... --locations="42.990605:-71.460989"
|
|
|
|
|
|
|
|
finally, you can also pass JSON string of dictionary
|
|
|
|
on the command line as you would via JSON RPC
|
|
|
|
|
|
|
|
... --locations="{'country': 'US', 'state': 'NH'}"
|
|
|
|
|
|
|
|
--thumbnail_url=<thumbnail_url>: (str) thumbnail url
|
|
|
|
--cover_url=<cover_url> : (str) url of cover image
|
|
|
|
--account_id=<account_id> : (str) account to use for holding the transaction
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
--funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
|
|
|
|
--claim_address=<claim_address>: (str) address where the channel is sent to, if not specified
|
|
|
|
it will be determined automatically from the account
|
|
|
|
--preview : (bool) do not broadcast the transaction
|
|
|
|
--blocking : (bool) wait until transaction is in mempool
|
|
|
|
|
|
|
|
Returns: {Transaction}
|
|
|
|
"""
|
|
|
|
self.service.ledger.valid_channel_name_or_error(name)
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
|
|
|
assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
|
|
|
|
account = wallet.accounts.get_or_default(account_id)
|
|
|
|
funding_accounts = wallet.accounts.get_or_all(funding_account_ids)
|
|
|
|
amount = self.ledger.get_dewies_or_error('bid', bid, positive_value=True)
|
2020-05-01 15:33:58 +02:00
|
|
|
claim_address = await account.get_valid_receiving_address(claim_address)
|
|
|
|
|
2020-05-06 16:53:31 +02:00
|
|
|
existing_channels, _ = await wallet.channels.list(claim_name=name)
|
2020-05-01 15:33:58 +02:00
|
|
|
if len(existing_channels) > 0:
|
|
|
|
if not allow_duplicate_name:
|
|
|
|
raise Exception(
|
|
|
|
f"You already have a channel under the name '{name}'. "
|
|
|
|
f"Use --allow-duplicate-name flag to override."
|
|
|
|
)
|
|
|
|
|
2020-05-06 16:53:31 +02:00
|
|
|
tx = await wallet.channels.create(
|
2020-05-01 15:33:58 +02:00
|
|
|
name, amount, account, funding_accounts, claim_address, preview, **kwargs
|
|
|
|
)
|
|
|
|
|
|
|
|
if not preview:
|
|
|
|
await self.service.broadcast_or_release(tx, blocking)
|
|
|
|
else:
|
2020-05-06 16:53:31 +02:00
|
|
|
await self.service.release_tx(tx)
|
2020-05-01 15:33:58 +02:00
|
|
|
|
|
|
|
return tx
|
|
|
|
|
|
|
|
    async def channel_update(
            self, claim_id, bid=None, account_id=None, wallet_id=None, claim_address=None,
            funding_account_ids=None, new_signing_key=False, preview=False,
            blocking=False, replace=False, **kwargs):
        """
        Update an existing channel claim.

        Usage:
            channel_update (<claim_id> | --claim_id=<claim_id>) [<bid> | --bid=<bid>]
                           [--title=<title>] [--description=<description>] [--email=<email>]
                           [--website_url=<website_url>]
                           [--featured=<featured>...] [--clear_featured]
                           [--tags=<tags>...] [--clear_tags]
                           [--languages=<languages>...] [--clear_languages]
                           [--locations=<locations>...] [--clear_locations]
                           [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
                           [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                           [--claim_address=<claim_address>] [--new_signing_key]
                           [--funding_account_ids=<funding_account_ids>...]
                           [--preview] [--blocking] [--replace]

        Options:
            --claim_id=<claim_id>          : (str) claim_id of the channel to update
            --bid=<bid>                    : (decimal) amount to back the claim
            --title=<title>                : (str) title of the publication
            --description=<description>    : (str) description of the publication
            --email=<email>                : (str) email of channel owner
            --website_url=<website_url>    : (str) website url
            --featured=<featured>          : (list) claim_ids of featured content in channel
            --clear_featured               : (bool) clear existing featured content (prior to adding new ones)
            --tags=<tags>                  : (list) add content tags
            --clear_tags                   : (bool) clear existing tags (prior to adding new ones)
            --languages=<languages>        : (list) languages used by the channel,
                                                    using RFC 5646 format, eg:
                                                    for English `--languages=en`
                                                    for Spanish (Spain) `--languages=es-ES`
                                                    for Spanish (Mexican) `--languages=es-MX`
                                                    for Chinese (Simplified) `--languages=zh-Hans`
                                                    for Chinese (Traditional) `--languages=zh-Hant`
            --clear_languages              : (bool) clear existing languages (prior to adding new ones)
            --locations=<locations>        : (list) locations of the channel, consisting of 2 letter
                                                    `country` code and a `state`, `city` and a postal
                                                    `code` along with a `latitude` and `longitude`.
                                                    for JSON RPC: pass a dictionary with aforementioned
                                                    attributes as keys, eg:
                                                        ...
                                                        "locations": [{'country': 'US', 'state': 'NH'}]
                                                        ...
                                                    for command line: pass a colon delimited list
                                                    with values in the following order:

                                                          "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                                    making sure to include colon for blank values, for
                                                    example to provide only the city:

                                                        ... --locations="::Manchester"

                                                    with all values set:

                                                        ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                                    optionally, you can just pass the "LATITUDE:LONGITUDE":

                                                        ... --locations="42.990605:-71.460989"

                                                    finally, you can also pass JSON string of dictionary
                                                    on the command line as you would via JSON RPC

                                                        ... --locations="{'country': 'US', 'state': 'NH'}"

            --clear_locations              : (bool) clear existing locations (prior to adding new ones)
            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --cover_url=<cover_url>        : (str) url of cover image
            --account_id=<account_id>      : (str) account in which to look for channel (default: all)
            --wallet_id=<wallet_id>        : (str) restrict operation to specific wallet
            --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
            --claim_address=<claim_address>: (str) address where the channel is sent
            --new_signing_key              : (bool) generate a new signing key, will invalidate all previous publishes
            --preview                      : (bool) do not broadcast the transaction
            --blocking                     : (bool) wait until transaction is in mempool
            --replace                      : (bool) instead of modifying specific values on
                                                    the channel, this will clear all existing values
                                                    and only save passed in values, useful for form
                                                    submissions where all values are always set

        Returns: {Transaction}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
        funding_accounts = wallet.accounts.get_or_all(funding_account_ids)
        # Restrict the channel lookup to one account if requested, otherwise search all.
        if account_id:
            account = wallet.get_account_or_error(account_id)
            accounts = [account]
        else:
            account = wallet.default_account
            accounts = wallet.accounts

        # claim_hash is the reversed binary form of the hex claim_id.
        existing_channels = await self.ledger.get_claims(
            wallet=wallet, accounts=accounts, claim_hash=unhexlify(claim_id)[::-1]
        )
        if len(existing_channels) != 1:
            account_ids = ', '.join(f"'{account.id}'" for account in accounts)
            raise Exception(
                f"Can't find the channel '{claim_id}' in account(s) {account_ids}."
            )
        old_txo = existing_channels[0]
        if not old_txo.claim.is_channel:
            raise Exception(
                f"A claim with id '{claim_id}' was found but it is not a channel."
            )

        # Keep the previous deposit amount unless a new bid was provided.
        # NOTE(review): channel_create calls self.ledger.get_dewies_or_error;
        # confirm this shortcut exists directly on the API object.
        if bid is not None:
            amount = self.get_dewies_or_error('bid', bid, positive_value=True)
        else:
            amount = old_txo.amount

        if claim_address is not None:
            self.valid_address_or_error(claim_address)
        else:
            claim_address = old_txo.get_address(account.ledger)

        if replace:
            # Start from a blank claim, carrying over only the public key,
            # so that only the values passed in this call are saved.
            claim = Claim()
            claim.channel.public_key_bytes = old_txo.claim.channel.public_key_bytes
        else:
            # Copy the existing claim and apply the requested modifications on top.
            claim = Claim.from_bytes(old_txo.claim.to_bytes())
        claim.channel.update(**kwargs)
        tx = await Transaction.claim_update(
            old_txo, claim, amount, claim_address, funding_accounts, funding_accounts[0]
        )
        new_txo = tx.outputs[0]

        # A new signing key invalidates all previously signed publishes for this channel.
        if new_signing_key:
            new_txo.generate_channel_private_key()
        else:
            new_txo.private_key = old_txo.private_key

        # Regenerate the output script now that the claim payload has changed.
        new_txo.script.generate()

        await tx.sign(funding_accounts)

        if not preview:
            # Persist the (possibly new) channel key before broadcasting so it
            # is not lost if the broadcast succeeds but the process dies.
            account.add_channel_private_key(new_txo.private_key)
            wallet.save()
            await self.broadcast_or_release(tx, blocking)
            await self.storage.save_claims([self._old_get_temp_claim_info(
                tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
            )])
            self.component_manager.loop.create_task(self.analytics_manager.send_new_channel())
        else:
            # Preview: release the reserved inputs without broadcasting.
            await account.ledger.release_tx(tx)

        return tx
|
|
|
|
|
|
|
|
    async def channel_abandon(
            self, claim_id=None, txid=None, nout=None, account_id=None, wallet_id=None,
            preview=False, blocking=True):
        # NOTE(review): blocking defaults to True here, unlike channel_create/update
        # (False) — confirm this asymmetry is intentional.
        """
        Abandon one of my channel claims.

        Usage:
            channel_abandon [<claim_id> | --claim_id=<claim_id>]
                            [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                            [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                            [--preview] [--blocking]

        Options:
            --claim_id=<claim_id>     : (str) claim_id of the claim to abandon
            --txid=<txid>             : (str) txid of the claim to abandon
            --nout=<nout>             : (int) nout of the claim to abandon
            --account_id=<account_id> : (str) id of the account to use
            --wallet_id=<wallet_id>   : (str) restrict operation to specific wallet
            --preview                 : (bool) do not broadcast the transaction
            --blocking                : (bool) wait until abandon is in mempool

        Returns: {Transaction}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
        if account_id:
            account = wallet.get_account_or_error(account_id)
            accounts = [account]
        else:
            account = wallet.default_account
            accounts = wallet.accounts

        # The claim to abandon can be addressed either by txid:nout or by claim_id.
        if txid is not None and nout is not None:
            claims = await self.ledger.get_claims(
                wallet=wallet, accounts=accounts, tx_hash=unhexlify(txid)[::-1], position=nout
            )
        elif claim_id is not None:
            claims = await self.ledger.get_claims(
                wallet=wallet, accounts=accounts, claim_id=claim_id
            )
        else:
            raise Exception('Must specify claim_id, or txid and nout')

        if not claims:
            raise Exception('No claim found for the specified claim_id or txid:nout')

        # Abandoning means spending the claim outputs back to the account with
        # no claim output on the new transaction.
        tx = await Transaction.create(
            [Input.spend(txo) for txo in claims], [], [account], account
        )

        if not preview:
            await self.broadcast_or_release(tx, blocking)
            self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('abandon'))
        else:
            # Preview: release the reserved inputs without broadcasting.
            await account.ledger.release_tx(tx)

        return tx
|
|
|
|
|
|
|
|
async def channel_list(self, *args, **kwargs):
|
|
|
|
"""
|
|
|
|
List my channel claims.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
channel_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
[--name=<name>...] [--claim_id=<claim_id>...] [--is_spent]
|
|
|
|
[--page=<page>] [--page_size=<page_size>] [--resolve] [--no_totals]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--name=<name> : (str or list) channel name
|
|
|
|
--claim_id=<claim_id> : (str or list) channel id
|
|
|
|
--is_spent : (bool) shows previous channel updates and abandons
|
|
|
|
--account_id=<account_id> : (str) id of the account to use
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
--resolve : (bool) resolves each channel to provide additional metadata
|
|
|
|
--no_totals : (bool) do not calculate the total number of pages and items in result set
|
|
|
|
(significant performance boost)
|
|
|
|
|
|
|
|
Returns: {Paginated[Output]}
|
|
|
|
"""
|
|
|
|
kwargs['type'] = 'channel'
|
|
|
|
if 'is_spent' not in kwargs:
|
|
|
|
kwargs['is_not_spent'] = True
|
|
|
|
return await self.txo_list(*args, **kwargs)
|
|
|
|
|
|
|
|
async def channel_export(self, channel_id=None, channel_name=None, account_id=None, wallet_id=None):
|
|
|
|
"""
|
|
|
|
Export channel private key.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
channel_export (<channel_id> | --channel_id=<channel_id> | --channel_name=<channel_name>)
|
|
|
|
[--account_id=<account_id>...] [--wallet_id=<wallet_id>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--channel_id=<channel_id> : (str) claim id of channel to export
|
|
|
|
--channel_name=<channel_name> : (str) name of channel to export
|
|
|
|
--account_id=<account_id> : (str) one or more account ids for accounts
|
|
|
|
to look in for channels, defaults to
|
|
|
|
all accounts.
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
(str) serialized channel private key
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
channel = await self.get_channel_or_error(wallet, account_id, channel_id, channel_name, for_signing=True)
|
|
|
|
address = channel.get_address(self.ledger)
|
|
|
|
public_key = await self.ledger.get_public_key_for_address(wallet, address)
|
|
|
|
if not public_key:
|
|
|
|
raise Exception("Can't find public key for address holding the channel.")
|
|
|
|
export = {
|
|
|
|
'name': channel.claim_name,
|
|
|
|
'channel_id': channel.claim_id,
|
|
|
|
'holding_address': address,
|
|
|
|
'holding_public_key': public_key.extended_key_string(),
|
|
|
|
'signing_private_key': channel.private_key.to_pem().decode()
|
|
|
|
}
|
|
|
|
return base58.b58encode(json.dumps(export, separators=(',', ':')))
|
|
|
|
|
|
|
|
    async def channel_import(self, channel_data, wallet_id=None):
        """
        Import serialized channel private key (to allow signing new streams to the channel)

        Usage:
            channel_import (<channel_data> | --channel_data=<channel_data>) [--wallet_id=<wallet_id>]

        Options:
            --channel_data=<channel_data> : (str) serialized channel, as exported by channel export
            --wallet_id=<wallet_id>       : (str) import into specific wallet

        Returns:
            (dict) Result dictionary
        """
        wallet = self.wallets.get_or_default(wallet_id)

        # channel_data is base58-wrapped JSON as produced by channel_export.
        decoded = base58.b58decode(channel_data)
        data = json.loads(decoded)
        channel_private_key = ecdsa.SigningKey.from_pem(
            data['signing_private_key'], hashfunc=hashlib.sha256
        )
        public_key_der = channel_private_key.get_verifying_key().to_der()

        # check that the holding_address hasn't changed since the export was made
        holding_address = data['holding_address']
        channels, _, _, _ = await self.ledger.claim_search(
            wallet.accounts, public_key_id=self.ledger.public_key_to_address(public_key_der)
        )
        if channels and channels[0].get_address(self.ledger) != holding_address:
            # The channel moved since export; trust the on-chain address instead.
            holding_address = channels[0].get_address(self.ledger)

        account = await self.ledger.get_account_for_address(wallet, holding_address)
        if account:
            # Case 1: channel holding address is in one of the accounts we already have
            #         simply add the certificate to existing account
            pass
        else:
            # Case 2: channel holding address hasn't changed and thus is in the bundled read-only account
            #         create a single-address holding account to manage the channel
            if holding_address == data['holding_address']:
                account = Account.from_dict(self.ledger, wallet, {
                    'name': f"Holding Account For Channel {data['name']}",
                    'public_key': data['holding_public_key'],
                    'address_generator': {'name': 'single-address'}
                })
                if self.ledger.sync.network.is_connected:
                    # NOTE(review): reaches into the private _update_tasks of the
                    # sync machinery to wait for the subscription to settle —
                    # consider a public wait API.
                    await self.ledger.subscribe_account(account)
                    await self.ledger.sync._update_tasks.done.wait()
            # Case 3: the holding address has changed and we can't create or find an account for it
            else:
                raise Exception(
                    "Channel owning account has changed since the channel was exported and "
                    "it is not an account to which you have access."
                )
        account.add_channel_private_key(channel_private_key)
        wallet.save()
        return f"Added channel signing key for {data['name']}."
|
|
|
|
|
|
|
|
STREAM_DOC = """
|
|
|
|
Create, update, abandon, list and inspect your stream claims.
|
|
|
|
"""
|
|
|
|
|
|
|
|
    async def publish(self, name, **kwargs):
        """
        Create or replace a stream claim at a given name (use 'stream create/update' for more control).

        Usage:
            publish (<name> | --name=<name>) [--bid=<bid>] [--file_path=<file_path>]
                    [--validate_file] [--optimize_file]
                    [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
                    [--title=<title>] [--description=<description>] [--author=<author>]
                    [--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
                    [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
                    [--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>]
                    [--channel_id=<channel_id> | --channel_name=<channel_name>]
                    [--channel_account_id=<channel_account_id>...]
                    [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                    [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
                    [--preview] [--blocking]

        Options:
            --name=<name>                  : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
            --bid=<bid>                    : (decimal) amount to back the claim
            --file_path=<file_path>        : (str) path to file to be associated with name.
            --validate_file                : (bool) validate that the video container and encodings match
                                                    common web browser support or that optimization succeeds if specified.
                                                    FFmpeg is required
            --optimize_file                : (bool) transcode the video & audio if necessary to ensure
                                                    common web browser support. FFmpeg is required
            --fee_currency=<fee_currency>  : (string) specify fee currency
            --fee_amount=<fee_amount>      : (decimal) content download fee
            --fee_address=<fee_address>    : (str) address where to send fee payments, will use
                                                   value from --claim_address if not provided
            --title=<title>                : (str) title of the publication
            --description=<description>    : (str) description of the publication
            --author=<author>              : (str) author of the publication. The usage for this field is not
                                                   the same as for channels. The author field is used to credit an author
                                                   who is not the publisher and is not represented by the channel. For
                                                   example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
                                                   by published to a channel such as '@classics', or to no channel at all
            --tags=<tags>                  : (list) add content tags
            --languages=<languages>        : (list) languages used by the channel,
                                                    using RFC 5646 format, eg:
                                                    for English `--languages=en`
                                                    for Spanish (Spain) `--languages=es-ES`
                                                    for Spanish (Mexican) `--languages=es-MX`
                                                    for Chinese (Simplified) `--languages=zh-Hans`
                                                    for Chinese (Traditional) `--languages=zh-Hant`
            --locations=<locations>        : (list) locations relevant to the stream, consisting of 2 letter
                                                    `country` code and a `state`, `city` and a postal
                                                    `code` along with a `latitude` and `longitude`.
                                                    for JSON RPC: pass a dictionary with aforementioned
                                                    attributes as keys, eg:
                                                        ...
                                                        "locations": [{'country': 'US', 'state': 'NH'}]
                                                        ...
                                                    for command line: pass a colon delimited list
                                                    with values in the following order:

                                                          "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                                    making sure to include colon for blank values, for
                                                    example to provide only the city:

                                                        ... --locations="::Manchester"

                                                    with all values set:

                                                        ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                                    optionally, you can just pass the "LATITUDE:LONGITUDE":

                                                        ... --locations="42.990605:-71.460989"

                                                    finally, you can also pass JSON string of dictionary
                                                    on the command line as you would via JSON RPC

                                                        ... --locations="{'country': 'US', 'state': 'NH'}"

            --license=<license>            : (str) publication license
            --license_url=<license_url>    : (str) publication license url
            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --release_time=<release_time>  : (int) original public release of content, seconds since UNIX epoch
            --width=<width>                : (int) image/video width, automatically calculated from media file
            --height=<height>              : (int) image/video height, automatically calculated from media file
            --duration=<duration>          : (int) audio/video duration in seconds, automatically calculated
            --channel_id=<channel_id>      : (str) claim id of the publisher channel
            --channel_name=<channel_name>  : (str) name of publisher channel
            --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                                             for channel certificates, defaults to all accounts.
            --account_id=<account_id>      : (str) account to use for holding the transaction
            --wallet_id=<wallet_id>        : (str) restrict operation to specific wallet
            --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
            --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                                                   it will be determined automatically from the account
            --preview                      : (bool) do not broadcast the transaction
            --blocking                     : (bool) wait until transaction is in mempool

        Returns: {Transaction}
        """
        self.valid_stream_name_or_error(name)
        # wallet_id/account_id are only peeked at (get, not pop) so they remain
        # in kwargs and flow through to stream_create/stream_update below.
        wallet = self.wallets.get_or_default(kwargs.get('wallet_id'))
        if kwargs.get('account_id'):
            accounts = [wallet.get_account_or_error(kwargs.get('account_id'))]
        else:
            accounts = wallet.accounts
        claims = await self.ledger.get_claims(
            wallet=wallet, accounts=accounts, claim_name=name
        )
        if len(claims) == 0:
            # No claim at this name yet: this is a fresh publish.
            if 'bid' not in kwargs:
                raise Exception("'bid' is a required argument for new publishes.")
            if 'file_path' not in kwargs:
                raise Exception("'file_path' is a required argument for new publishes.")
            return await self.stream_create(name, **kwargs)
        elif len(claims) == 1:
            # Exactly one claim: replace it wholesale (replace=True).
            assert claims[0].claim.is_stream, f"Claim at name '{name}' is not a stream claim."
            return await self.stream_update(claims[0].claim_id, replace=True, **kwargs)
        # Multiple claims at the same name are ambiguous; the caller must pick one.
        raise Exception(
            f"There are {len(claims)} claims for '{name}', please use 'stream update' command "
            f"to update a specific stream claim."
        )
|
|
|
|
|
|
|
|
    async def stream_repost(self, name, bid, claim_id, allow_duplicate_name=False, channel_id=None,
                            channel_name=None, channel_account_id=None, account_id=None, wallet_id=None,
                            claim_address=None, funding_account_ids=None, preview=False, blocking=False):
        """
        Creates a claim that references an existing stream by its claim id.

        Usage:
            stream_repost (<name> | --name=<name>) (<bid> | --bid=<bid>) (<claim_id> | --claim_id=<claim_id>)
                          [--allow_duplicate_name=<allow_duplicate_name>]
                          [--channel_id=<channel_id> | --channel_name=<channel_name>]
                          [--channel_account_id=<channel_account_id>...]
                          [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                          [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
                          [--preview] [--blocking]

        Options:
            --name=<name>              : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
            --bid=<bid>                : (decimal) amount to back the claim
            --claim_id=<claim_id>      : (str) id of the claim being reposted
            --allow_duplicate_name=<allow_duplicate_name> : (bool) create new claim even if one already exists with
                                                            given name. default: false.
            --channel_id=<channel_id>  : (str) claim id of the publisher channel
            --channel_name=<channel_name> : (str) name of the publisher channel
            --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                                       for channel certificates, defaults to all accounts.
            --account_id=<account_id>  : (str) account to use for holding the transaction
            --wallet_id=<wallet_id>    : (str) restrict operation to specific wallet
            --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
            --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                                             it will be determined automatically from the account
            --preview                  : (bool) do not broadcast the transaction
            --blocking                 : (bool) wait until transaction is in mempool

        Returns: {Transaction}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        # NOTE(review): sibling stream_create validates via self.ledger.valid_stream_name_or_error;
        # confirm which helper is current during this refactor.
        self.valid_stream_name_or_error(name)
        account = wallet.accounts.get_or_default(account_id)
        funding_accounts = wallet.accounts.get_or_all(funding_account_ids)
        # Resolve the optional signing channel from id/name; None if neither given.
        channel = await self.get_channel_or_none(wallet, channel_account_id, channel_id, channel_name, for_signing=True)
        amount = self.get_dewies_or_error('bid', bid, positive_value=True)
        claim_address = await self.get_receiving_address(claim_address, account)
        # Refuse to create a second claim under an existing name unless explicitly allowed.
        claims = await account.get_claims(claim_name=name)
        if len(claims) > 0:
            if not allow_duplicate_name:
                raise Exception(
                    f"You already have a stream claim published under the name '{name}'. "
                    f"Use --allow-duplicate-name flag to override."
                )
        if not VALID_FULL_CLAIM_ID.fullmatch(claim_id):
            raise Exception('Invalid claim id. It is expected to be a 40 characters long hexadecimal string.')

        # Build a repost claim that merely references the target claim id.
        claim = Claim()
        claim.repost.reference.claim_id = claim_id
        tx = await Transaction.claim_create(
            name, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel
        )
        new_txo = tx.outputs[0]

        if channel:
            new_txo.sign(channel)
        await tx.sign(funding_accounts)

        if not preview:
            await self.broadcast_or_release(tx, blocking)
            self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
        else:
            # Preview: release the reserved outputs instead of broadcasting.
            await account.ledger.release_tx(tx)

        return tx
    async def stream_create(
            self, name, bid, file_path, allow_duplicate_name=False,
            channel_id=None, channel_name=None, channel_account_id=None,
            account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None,
            preview=False, blocking=False, validate_file=False, optimize_file=False, **kwargs):
        """
        Make a new stream claim and announce the associated file to lbrynet.

        Usage:
            stream_create (<name> | --name=<name>) (<bid> | --bid=<bid>) (<file_path> | --file_path=<file_path>)
                          [--validate_file] [--optimize_file]
                          [--allow_duplicate_name=<allow_duplicate_name>]
                          [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>] [--fee_address=<fee_address>]
                          [--title=<title>] [--description=<description>] [--author=<author>]
                          [--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
                          [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
                          [--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>]
                          [--channel_id=<channel_id> | --channel_name=<channel_name>]
                          [--channel_account_id=<channel_account_id>...]
                          [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                          [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
                          [--preview] [--blocking]

        Options:
            --name=<name>              : (str) name of the content (can only consist of a-z A-Z 0-9 and -(dash))
            --bid=<bid>                : (decimal) amount to back the claim
            --file_path=<file_path>    : (str) path to file to be associated with name.
            --validate_file            : (bool) validate that the video container and encodings match
                                         common web browser support or that optimization succeeds if specified.
                                         FFmpeg is required
            --optimize_file            : (bool) transcode the video & audio if necessary to ensure
                                         common web browser support. FFmpeg is required
            --allow_duplicate_name=<allow_duplicate_name> : (bool) create new claim even if one already exists with
                                                            given name. default: false.
            --fee_currency=<fee_currency> : (string) specify fee currency
            --fee_amount=<fee_amount>  : (decimal) content download fee
            --fee_address=<fee_address> : (str) address where to send fee payments, will use
                                          value from --claim_address if not provided
            --title=<title>            : (str) title of the publication
            --description=<description> : (str) description of the publication
            --author=<author>          : (str) author of the publication. The usage for this field is not
                                         the same as for channels. The author field is used to credit an author
                                         who is not the publisher and is not represented by the channel. For
                                         example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
                                         by published to a channel such as '@classics', or to no channel at all
            --tags=<tags>              : (list) add content tags
            --languages=<languages>    : (list) languages used by the channel,
                                         using RFC 5646 format, eg:
                                         for English `--languages=en`
                                         for Spanish (Spain) `--languages=es-ES`
                                         for Spanish (Mexican) `--languages=es-MX`
                                         for Chinese (Simplified) `--languages=zh-Hans`
                                         for Chinese (Traditional) `--languages=zh-Hant`
            --locations=<locations>    : (list) locations relevant to the stream, consisting of 2 letter
                                         `country` code and a `state`, `city` and a postal
                                         `code` along with a `latitude` and `longitude`.
                                         for JSON RPC: pass a dictionary with aforementioned
                                         attributes as keys, eg:
                                         ...
                                         "locations": [{'country': 'US', 'state': 'NH'}]
                                         ...
                                         for command line: pass a colon delimited list
                                         with values in the following order:

                                         "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                         making sure to include colon for blank values, for
                                         example to provide only the city:

                                         ... --locations="::Manchester"

                                         with all values set:

                                         ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                         optionally, you can just pass the "LATITUDE:LONGITUDE":

                                         ... --locations="42.990605:-71.460989"

                                         finally, you can also pass JSON string of dictionary
                                         on the command line as you would via JSON RPC

                                         ... --locations="{'country': 'US', 'state': 'NH'}"

            --license=<license>        : (str) publication license
            --license_url=<license_url> : (str) publication license url
            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch
            --width=<width>            : (int) image/video width, automatically calculated from media file
            --height=<height>          : (int) image/video height, automatically calculated from media file
            --duration=<duration>      : (int) audio/video duration in seconds, automatically calculated
            --channel_id=<channel_id>  : (str) claim id of the publisher channel
            --channel_name=<channel_name> : (str) name of the publisher channel
            --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                                       for channel certificates, defaults to all accounts.
            --account_id=<account_id>  : (str) account to use for holding the transaction
            --wallet_id=<wallet_id>    : (str) restrict operation to specific wallet
            --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
            --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                                             it will be determined automatically from the account
            --preview                  : (bool) do not broadcast the transaction
            --blocking                 : (bool) wait until transaction is in mempool

        Returns: {Transaction}
        """
        self.ledger.valid_stream_name_or_error(name)
        wallet = self.wallets.get_or_default(wallet_id)
        assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
        account = wallet.accounts.get_or_default(account_id)
        funding_accounts = wallet.accounts.get_or_all(funding_account_ids)
        # Resolve the optional signing channel from id/name; None if neither given.
        channel = await wallet.channels.get_for_signing_or_none(claim_id=channel_id, claim_name=channel_name)
        amount = self.ledger.get_dewies_or_error('bid', bid, positive_value=True)
        claim_address = await account.get_valid_receiving_address(claim_address)
        # Fee payments default to the claim address when --fee_address is absent.
        kwargs['fee_address'] = self.ledger.get_fee_address(kwargs, claim_address)

        # Refuse to create a second claim under an existing name unless explicitly allowed.
        claims = await wallet.streams.list(claim_name=name)
        if len(claims) > 0:
            if not allow_duplicate_name:
                raise Exception(
                    f"You already have a stream claim published under the name '{name}'. "
                    f"Use --allow-duplicate-name flag to override."
                )

        # TODO: fix
        #file_path, spec = await self._video_file_analyzer.verify_or_repair(
        #    validate_file, optimize_file, file_path, ignore_non_video=True
        #)
        #kwargs.update(spec)

        # NOTE(review): mid-refactor call with no arguments and no await — presumably the
        # new creation path that will replace the Transaction.claim_create block below; confirm.
        wallet.streams.create(
        )

        claim = Claim()
        # sd_hash is a placeholder here; the real hash is filled in after the file stream
        # is created (see the `if not preview` block below).
        claim.stream.update(file_path=file_path, sd_hash='0' * 96, **kwargs)
        tx = await Transaction.claim_create(
            name, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel
        )
        new_txo = tx.outputs[0]

        file_stream = None
        if not preview:
            # Announce the file to lbrynet and patch the claim with the real sd_hash.
            file_stream = await self.stream_manager.create_stream(file_path)
            claim.stream.source.sd_hash = file_stream.sd_hash
            new_txo.script.generate()

        if channel:
            new_txo.sign(channel)
        await tx.sign(funding_accounts)

        if not preview:
            await self.broadcast_or_release(tx, blocking)
            await self.storage.save_claims([self._old_get_temp_claim_info(
                tx, new_txo, claim_address, claim, name, dewies_to_lbc(amount)
            )])
            await self.storage.save_content_claim(file_stream.stream_hash, new_txo.id)
            self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
        else:
            # Preview: release the reserved outputs instead of broadcasting.
            await account.ledger.release_tx(tx)

        return tx
    async def stream_update(
            self, claim_id, bid=None, file_path=None,
            channel_id=None, channel_name=None, channel_account_id=None, clear_channel=False,
            account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None,
            preview=False, blocking=False, replace=False, validate_file=False, optimize_file=False, **kwargs):
        """
        Update an existing stream claim and if a new file is provided announce it to lbrynet.

        Usage:
            stream_update (<claim_id> | --claim_id=<claim_id>) [--bid=<bid>] [--file_path=<file_path>]
                          [--validate_file] [--optimize_file]
                          [--file_name=<file_name>] [--file_size=<file_size>] [--file_hash=<file_hash>]
                          [--fee_currency=<fee_currency>] [--fee_amount=<fee_amount>]
                          [--fee_address=<fee_address>] [--clear_fee]
                          [--title=<title>] [--description=<description>] [--author=<author>]
                          [--tags=<tags>...] [--clear_tags]
                          [--languages=<languages>...] [--clear_languages]
                          [--locations=<locations>...] [--clear_locations]
                          [--license=<license>] [--license_url=<license_url>] [--thumbnail_url=<thumbnail_url>]
                          [--release_time=<release_time>] [--width=<width>] [--height=<height>] [--duration=<duration>]
                          [--channel_id=<channel_id> | --channel_name=<channel_name> | --clear_channel]
                          [--channel_account_id=<channel_account_id>...]
                          [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                          [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
                          [--preview] [--blocking] [--replace]

        Options:
            --claim_id=<claim_id>      : (str) id of the stream claim to update
            --bid=<bid>                : (decimal) amount to back the claim
            --file_path=<file_path>    : (str) path to file to be associated with name.
            --validate_file            : (bool) validate that the video container and encodings match
                                         common web browser support or that optimization succeeds if specified.
                                         FFmpeg is required and file_path must be specified.
            --optimize_file            : (bool) transcode the video & audio if necessary to ensure common
                                         web browser support. FFmpeg is required and file_path must be specified.
            --file_name=<file_name>    : (str) override file name, defaults to name from file_path.
            --file_size=<file_size>    : (str) override file size, otherwise automatically computed.
            --file_hash=<file_hash>    : (str) override file hash, otherwise automatically computed.
            --fee_currency=<fee_currency> : (string) specify fee currency
            --fee_amount=<fee_amount>  : (decimal) content download fee
            --fee_address=<fee_address> : (str) address where to send fee payments, will use
                                          value from --claim_address if not provided
            --clear_fee                : (bool) clear previously set fee
            --title=<title>            : (str) title of the publication
            --description=<description> : (str) description of the publication
            --author=<author>          : (str) author of the publication. The usage for this field is not
                                         the same as for channels. The author field is used to credit an author
                                         who is not the publisher and is not represented by the channel. For
                                         example, a pdf file of 'The Odyssey' has an author of 'Homer' but may
                                         by published to a channel such as '@classics', or to no channel at all
            --tags=<tags>              : (list) add content tags
            --clear_tags               : (bool) clear existing tags (prior to adding new ones)
            --languages=<languages>    : (list) languages used by the channel,
                                         using RFC 5646 format, eg:
                                         for English `--languages=en`
                                         for Spanish (Spain) `--languages=es-ES`
                                         for Spanish (Mexican) `--languages=es-MX`
                                         for Chinese (Simplified) `--languages=zh-Hans`
                                         for Chinese (Traditional) `--languages=zh-Hant`
            --clear_languages          : (bool) clear existing languages (prior to adding new ones)
            --locations=<locations>    : (list) locations relevant to the stream, consisting of 2 letter
                                         `country` code and a `state`, `city` and a postal
                                         `code` along with a `latitude` and `longitude`.
                                         for JSON RPC: pass a dictionary with aforementioned
                                         attributes as keys, eg:
                                         ...
                                         "locations": [{'country': 'US', 'state': 'NH'}]
                                         ...
                                         for command line: pass a colon delimited list
                                         with values in the following order:

                                         "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                         making sure to include colon for blank values, for
                                         example to provide only the city:

                                         ... --locations="::Manchester"

                                         with all values set:

                                         ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                         optionally, you can just pass the "LATITUDE:LONGITUDE":

                                         ... --locations="42.990605:-71.460989"

                                         finally, you can also pass JSON string of dictionary
                                         on the command line as you would via JSON RPC

                                         ... --locations="{'country': 'US', 'state': 'NH'}"

            --clear_locations          : (bool) clear existing locations (prior to adding new ones)
            --license=<license>        : (str) publication license
            --license_url=<license_url> : (str) publication license url
            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --release_time=<release_time> : (int) original public release of content, seconds since UNIX epoch
            --width=<width>            : (int) image/video width, automatically calculated from media file
            --height=<height>          : (int) image/video height, automatically calculated from media file
            --duration=<duration>      : (int) audio/video duration in seconds, automatically calculated
            --channel_id=<channel_id>  : (str) claim id of the publisher channel
            --channel_name=<channel_name> : (str) name of the publisher channel
            --clear_channel            : (bool) remove channel signature
            --channel_account_id=<channel_account_id>: (str) one or more account ids for accounts to look in
                                                       for channel certificates, defaults to all accounts.
            --account_id=<account_id>  : (str) account in which to look for stream (default: all)
            --wallet_id=<wallet_id>    : (str) restrict operation to specific wallet
            --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
            --claim_address=<claim_address>: (str) address where the claim is sent to, if not specified
                                             it will be determined automatically from the account
            --preview                  : (bool) do not broadcast the transaction
            --blocking                 : (bool) wait until transaction is in mempool
            --replace                  : (bool) instead of modifying specific values on
                                         the stream, this will clear all existing values
                                         and only save passed in values, useful for form
                                         submissions where all values are always set

        Returns: {Transaction}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
        funding_accounts = wallet.accounts.get_or_all(funding_account_ids)
        # Restrict the claim search to one account if requested, otherwise search all.
        if account_id:
            account = wallet.get_account_or_error(account_id)
            accounts = [account]
        else:
            account = wallet.default_account
            accounts = wallet.accounts

        # The claim being updated must exist and be unique within the selected accounts.
        existing_claims = await self.ledger.get_claims(
            wallet=wallet, accounts=accounts, claim_id=claim_id
        )
        if len(existing_claims) != 1:
            account_ids = ', '.join(f"'{account.id}'" for account in accounts)
            raise Exception(
                f"Can't find the stream '{claim_id}' in account(s) {account_ids}."
            )
        old_txo = existing_claims[0]
        if not old_txo.claim.is_stream:
            raise Exception(
                f"A claim with id '{claim_id}' was found but it is not a stream claim."
            )

        # Keep the existing bid amount unless a new one was supplied.
        if bid is not None:
            amount = self.get_dewies_or_error('bid', bid, positive_value=True)
        else:
            amount = old_txo.amount

        # Keep the existing claim address unless a new (validated) one was supplied.
        if claim_address is not None:
            self.valid_address_or_error(claim_address)
        else:
            claim_address = old_txo.get_address(account.ledger)

        # Re-sign with a new channel, keep the old signature, or drop it entirely
        # (--clear_channel / --replace leave channel as None).
        channel = None
        if channel_id or channel_name:
            channel = await self.get_channel_or_error(
                wallet, channel_account_id, channel_id, channel_name, for_signing=True)
        elif old_txo.claim.is_signed and not clear_channel and not replace:
            channel = old_txo.channel

        fee_address = self.get_fee_address(kwargs, claim_address)
        if fee_address:
            kwargs['fee_address'] = fee_address

        # Optionally validate/transcode the media file; spec carries detected media metadata.
        file_path, spec = await self._video_file_analyzer.verify_or_repair(
            validate_file, optimize_file, file_path, ignore_non_video=True
        )
        kwargs.update(spec)

        if replace:
            # Start from an empty claim, carrying over only the source and the
            # stream-type submessage from the old claim.
            claim = Claim()
            claim.stream.message.source.CopyFrom(
                old_txo.claim.stream.message.source
            )
            stream_type = old_txo.claim.stream.stream_type
            if stream_type:
                old_stream_type = getattr(old_txo.claim.stream.message, stream_type)
                new_stream_type = getattr(claim.stream.message, stream_type)
                new_stream_type.CopyFrom(old_stream_type)
            claim.stream.update(file_path=file_path, **kwargs)
        else:
            # Modify a full copy of the existing claim in place.
            claim = Claim.from_bytes(old_txo.claim.to_bytes())
            claim.stream.update(file_path=file_path, **kwargs)
        tx = await Transaction.claim_update(
            old_txo, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel
        )
        new_txo = tx.outputs[0]

        stream_hash = None
        if not preview:
            old_stream = self.stream_manager.streams.get(old_txo.claim.stream.source.sd_hash, None)
            if file_path is not None:
                # A new file replaces the old stream; the claim gets the new sd_hash.
                if old_stream:
                    await self.stream_manager.delete_stream(old_stream, delete_file=False)
                file_stream = await self.stream_manager.create_stream(file_path)
                new_txo.claim.stream.source.sd_hash = file_stream.sd_hash
                new_txo.script.generate()
                stream_hash = file_stream.stream_hash
            elif old_stream:
                stream_hash = old_stream.stream_hash

        if channel:
            new_txo.sign(channel)
        await tx.sign(funding_accounts)

        if not preview:
            await self.broadcast_or_release(tx, blocking)
            await self.storage.save_claims([self._old_get_temp_claim_info(
                tx, new_txo, claim_address, new_txo.claim, new_txo.claim_name, dewies_to_lbc(amount)
            )])
            if stream_hash:
                await self.storage.save_content_claim(stream_hash, new_txo.id)
            self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
        else:
            # Preview: release the reserved outputs instead of broadcasting.
            await account.ledger.release_tx(tx)

        return tx
async def stream_abandon(
|
|
|
|
self, claim_id=None, txid=None, nout=None, account_id=None, wallet_id=None,
|
|
|
|
preview=False, blocking=False):
|
|
|
|
"""
|
|
|
|
Abandon one of my stream claims.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
stream_abandon [<claim_id> | --claim_id=<claim_id>]
|
|
|
|
[<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
|
|
|
|
[--account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
[--preview] [--blocking]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--claim_id=<claim_id> : (str) claim_id of the claim to abandon
|
|
|
|
--txid=<txid> : (str) txid of the claim to abandon
|
|
|
|
--nout=<nout> : (int) nout of the claim to abandon
|
|
|
|
--account_id=<account_id> : (str) id of the account to use
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict operation to specific wallet
|
|
|
|
--preview : (bool) do not broadcast the transaction
|
|
|
|
--blocking : (bool) wait until abandon is in mempool
|
|
|
|
|
|
|
|
Returns: {Transaction}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
|
|
|
|
if account_id:
|
|
|
|
account = wallet.get_account_or_error(account_id)
|
|
|
|
accounts = [account]
|
|
|
|
else:
|
|
|
|
account = wallet.default_account
|
|
|
|
accounts = wallet.accounts
|
|
|
|
|
|
|
|
if txid is not None and nout is not None:
|
|
|
|
claims = await self.ledger.get_claims(
|
|
|
|
wallet=wallet, accounts=accounts, tx_hash=unhexlify(txid)[::-1], position=nout
|
|
|
|
)
|
|
|
|
elif claim_id is not None:
|
|
|
|
claims = await self.ledger.get_claims(
|
|
|
|
wallet=wallet, accounts=accounts, claim_id=claim_id
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
raise Exception('Must specify claim_id, or txid and nout')
|
|
|
|
|
|
|
|
if not claims:
|
|
|
|
raise Exception('No claim found for the specified claim_id or txid:nout')
|
|
|
|
|
|
|
|
tx = await Transaction.create(
|
|
|
|
[Input.spend(txo) for txo in claims], [], accounts, account
|
|
|
|
)
|
|
|
|
|
|
|
|
if not preview:
|
|
|
|
await self.broadcast_or_release(tx, blocking)
|
|
|
|
self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('abandon'))
|
|
|
|
else:
|
|
|
|
await self.ledger.release_tx(tx)
|
|
|
|
|
|
|
|
return tx
|
|
|
|
|
|
|
|
async def stream_list(self, *args, **kwargs):
|
|
|
|
"""
|
|
|
|
List my stream claims.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
stream_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
[--name=<name>...] [--claim_id=<claim_id>...] [--is_spent]
|
|
|
|
[--page=<page>] [--page_size=<page_size>] [--resolve] [--no_totals]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--name=<name> : (str or list) stream name
|
|
|
|
--claim_id=<claim_id> : (str or list) stream id
|
|
|
|
--is_spent : (bool) shows previous stream updates and abandons
|
|
|
|
--account_id=<account_id> : (str) id of the account to query
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
--resolve : (bool) resolves each stream to provide additional metadata
|
|
|
|
--no_totals : (bool) do not calculate the total number of pages and items in result set
|
|
|
|
(significant performance boost)
|
|
|
|
|
|
|
|
Returns: {Paginated[Output]}
|
|
|
|
"""
|
|
|
|
kwargs['type'] = 'stream'
|
|
|
|
if 'is_spent' not in kwargs:
|
|
|
|
kwargs['is_not_spent'] = True
|
|
|
|
return await self.txo_list(*args, **kwargs)
|
|
|
|
|
|
|
|
    async def stream_cost_estimate(self, uri):
        """
        Get estimated cost for a lbry stream

        Usage:
            stream_cost_estimate (<uri> | --uri=<uri>)

        Options:
            --uri=<uri>      : (str) uri to use

        Returns:
            (float) Estimated cost in lbry credits, returns None if uri is not
                resolvable
        """
        # NOTE(review): get_est_cost_from_uri is returned without `await` — if it is a
        # coroutine function this hands back a coroutine object, not the float the
        # docstring promises; confirm and add `await` if so.
        return self.get_est_cost_from_uri(uri)
    # Group docstring shown by the CLI for the collection_* command family.
    COLLECTION_DOC = """
    Create, update, list, resolve, and abandon collections.
    """
    async def collection_create(
            self, name, bid, claims, allow_duplicate_name=False,
            channel_id=None, channel_name=None, channel_account_id=None,
            account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None,
            preview=False, blocking=False, **kwargs):
        """
        Create a new collection.

        Usage:
            collection_create (<name> | --name=<name>) (<bid> | --bid=<bid>)
                              (<claims>... | --claims=<claims>...)
                              [--allow_duplicate_name]
                              [--title=<title>] [--description=<description>]
                              [--tags=<tags>...] [--languages=<languages>...] [--locations=<locations>...]
                              [--thumbnail_url=<thumbnail_url>]
                              [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                              [--claim_address=<claim_address>] [--funding_account_ids=<funding_account_ids>...]
                              [--preview] [--blocking]

        Options:
            --name=<name>              : (str) name of the collection
            --bid=<bid>                : (decimal) amount to back the claim
            --claims=<claims>          : (list) claim ids to be included in the collection
            --allow_duplicate_name     : (bool) create new collection even if one already exists with
                                         given name. default: false.
            --title=<title>            : (str) title of the collection
            --description=<description> : (str) description of the collection
            --tags=<tags>              : (list) content tags
            --clear_languages          : (bool) clear existing languages (prior to adding new ones)
            --languages=<languages>    : (list) languages used by the collection,
                                         using RFC 5646 format, eg:
                                         for English `--languages=en`
                                         for Spanish (Spain) `--languages=es-ES`
                                         for Spanish (Mexican) `--languages=es-MX`
                                         for Chinese (Simplified) `--languages=zh-Hans`
                                         for Chinese (Traditional) `--languages=zh-Hant`
            --locations=<locations>    : (list) locations of the collection, consisting of 2 letter
                                         `country` code and a `state`, `city` and a postal
                                         `code` along with a `latitude` and `longitude`.
                                         for JSON RPC: pass a dictionary with aforementioned
                                         attributes as keys, eg:
                                         ...
                                         "locations": [{'country': 'US', 'state': 'NH'}]
                                         ...
                                         for command line: pass a colon delimited list
                                         with values in the following order:

                                         "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                         making sure to include colon for blank values, for
                                         example to provide only the city:

                                         ... --locations="::Manchester"

                                         with all values set:

                                         ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                         optionally, you can just pass the "LATITUDE:LONGITUDE":

                                         ... --locations="42.990605:-71.460989"

                                         finally, you can also pass JSON string of dictionary
                                         on the command line as you would via JSON RPC

                                         ... --locations="{'country': 'US', 'state': 'NH'}"

            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --account_id=<account_id>  : (str) account to use for holding the transaction
            --wallet_id=<wallet_id>    : (str) restrict operation to specific wallet
            --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
            --claim_address=<claim_address>: (str) address where the collection is sent to, if not specified
                                             it will be determined automatically from the account
            --preview                  : (bool) do not broadcast the transaction
            --blocking                 : (bool) wait until transaction is in mempool

        Returns: {Transaction}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        account = wallet.accounts.get_or_default(account_id)
        funding_accounts = wallet.accounts.get_or_all(funding_account_ids)
        self.valid_collection_name_or_error(name)
        # Resolve the optional signing channel from id/name; None if neither given.
        channel = await self.get_channel_or_none(wallet, channel_account_id, channel_id, channel_name, for_signing=True)
        amount = self.get_dewies_or_error('bid', bid, positive_value=True)
        claim_address = await self.get_receiving_address(claim_address, account)

        # Refuse to create a second collection under an existing name unless explicitly allowed.
        existing_collections = await self.ledger.get_collections(accounts=wallet.accounts, claim_name=name)
        if len(existing_collections) > 0:
            if not allow_duplicate_name:
                raise Exception(
                    f"You already have a collection under the name '{name}'. "
                    f"Use --allow-duplicate-name flag to override."
                )

        # Build the collection claim referencing the given claim ids.
        claim = Claim()
        claim.collection.update(claims=claims, **kwargs)
        tx = await Transaction.claim_create(
            name, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel
        )
        new_txo = tx.outputs[0]

        if channel:
            new_txo.sign(channel)
        await tx.sign(funding_accounts)

        if not preview:
            await self.broadcast_or_release(tx, blocking)
            self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
        else:
            # Preview: release the reserved outputs instead of broadcasting.
            await account.ledger.release_tx(tx)

        return tx
    async def collection_update(
            self, claim_id, bid=None,
            channel_id=None, channel_name=None, channel_account_id=None, clear_channel=False,
            account_id=None, wallet_id=None, claim_address=None, funding_account_ids=None,
            preview=False, blocking=False, replace=False, **kwargs):
        """
        Update an existing collection claim.

        Usage:
            collection_update (<claim_id> | --claim_id=<claim_id>) [--bid=<bid>]
                           [--claims=<claims>...] [--clear_claims]
                           [--title=<title>] [--description=<description>]
                           [--tags=<tags>...] [--clear_tags]
                           [--languages=<languages>...] [--clear_languages]
                           [--locations=<locations>...] [--clear_locations]
                           [--thumbnail_url=<thumbnail_url>] [--cover_url=<cover_url>]
                           [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                           [--claim_address=<claim_address>] [--new_signing_key]
                           [--funding_account_ids=<funding_account_ids>...]
                           [--preview] [--blocking] [--replace]

        Options:
            --claim_id=<claim_id>          : (str) claim_id of the collection to update
            --bid=<bid>                    : (decimal) amount to back the claim
            --claims=<claims>              : (list) claim ids
            --clear_claims                 : (bool) clear existing claim references (prior to adding new ones)
            --title=<title>                : (str) title of the collection
            --description=<description>    : (str) description of the collection
            --tags=<tags>                  : (list) add content tags
            --clear_tags                   : (bool) clear existing tags (prior to adding new ones)
            --languages=<languages>        : (list) languages used by the collection,
                                                   using RFC 5646 format, eg:
                                                   for English `--languages=en`
                                                   for Spanish (Spain) `--languages=es-ES`
                                                   for Spanish (Mexican) `--languages=es-MX`
                                                   for Chinese (Simplified) `--languages=zh-Hans`
                                                   for Chinese (Traditional) `--languages=zh-Hant`
            --clear_languages              : (bool) clear existing languages (prior to adding new ones)
            --locations=<locations>        : (list) locations of the collection, consisting of 2 letter
                                                   `country` code and a `state`, `city` and a postal
                                                   `code` along with a `latitude` and `longitude`.
                                                   for JSON RPC: pass a dictionary with aforementioned
                                                       attributes as keys, eg:
                                                       ...
                                                       "locations": [{'country': 'US', 'state': 'NH'}]
                                                       ...
                                                   for command line: pass a colon delimited list
                                                       with values in the following order:

                                                         "COUNTRY:STATE:CITY:CODE:LATITUDE:LONGITUDE"

                                                       making sure to include colon for blank values, for
                                                       example to provide only the city:

                                                         ... --locations="::Manchester"

                                                       with all values set:

                                                         ... --locations="US:NH:Manchester:03101:42.990605:-71.460989"

                                                       optionally, you can just pass the "LATITUDE:LONGITUDE":

                                                         ... --locations="42.990605:-71.460989"

                                                       finally, you can also pass JSON string of dictionary
                                                       on the command line as you would via JSON RPC

                                                         ... --locations="{'country': 'US', 'state': 'NH'}"

            --clear_locations              : (bool) clear existing locations (prior to adding new ones)
            --thumbnail_url=<thumbnail_url>: (str) thumbnail url
            --account_id=<account_id>      : (str) account in which to look for collection (default: all)
            --wallet_id=<wallet_id>        : (str) restrict operation to specific wallet
            --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
            --claim_address=<claim_address>: (str) address where the collection is sent
            --new_signing_key              : (bool) generate a new signing key, will invalidate all previous publishes
            --preview                      : (bool) do not broadcast the transaction
            --blocking                     : (bool) wait until transaction is in mempool
            --replace                      : (bool) instead of modifying specific values on
                                                    the collection, this will clear all existing values
                                                    and only save passed in values, useful for form
                                                    submissions where all values are always set

        Returns: {Transaction}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        funding_accounts = wallet.accounts.get_or_all(funding_account_ids)
        # `account` is only used for address derivation and (in preview mode)
        # releasing reserved outputs; `accounts` scopes the collection lookup.
        if account_id:
            account = wallet.get_account_or_error(account_id)
            accounts = [account]
        else:
            account = wallet.default_account
            accounts = wallet.accounts

        existing_collections = await self.ledger.get_collections(
            wallet=wallet, accounts=accounts, claim_id=claim_id
        )
        # Exactly one match is required: zero means not found, more than one
        # would make the update ambiguous.
        if len(existing_collections) != 1:
            account_ids = ', '.join(f"'{account.id}'" for account in accounts)
            raise Exception(
                f"Can't find the collection '{claim_id}' in account(s) {account_ids}."
            )
        old_txo = existing_collections[0]
        if not old_txo.claim.is_collection:
            raise Exception(
                f"A claim with id '{claim_id}' was found but it is not a collection."
            )

        # Unspecified bid/address fall back to the values of the claim being updated.
        if bid is not None:
            amount = self.get_dewies_or_error('bid', bid, positive_value=True)
        else:
            amount = old_txo.amount

        if claim_address is not None:
            self.valid_address_or_error(claim_address)
        else:
            claim_address = old_txo.get_address(account.ledger)

        channel = None
        if channel_id or channel_name:
            channel = await self.get_channel_or_error(
                wallet, channel_account_id, channel_id, channel_name, for_signing=True)
        elif old_txo.claim.is_signed and not clear_channel and not replace:
            # Keep the existing channel signature unless explicitly cleared/replaced.
            channel = old_txo.channel

        if replace:
            # Start from a blank claim, preserving only the source message,
            # then apply the passed-in values.
            claim = Claim()
            claim.collection.message.source.CopyFrom(
                old_txo.claim.collection.message.source
            )
            claim.collection.update(**kwargs)
        else:
            # Copy the old claim wholesale and apply incremental changes.
            claim = Claim.from_bytes(old_txo.claim.to_bytes())
            claim.collection.update(**kwargs)
        tx = await Transaction.claim_update(
            old_txo, claim, amount, claim_address, funding_accounts, funding_accounts[0], channel
        )
        new_txo = tx.outputs[0]

        # Re-generate the output script so it reflects the modified claim bytes.
        new_txo.script.generate()

        if channel:
            new_txo.sign(channel)
        await tx.sign(funding_accounts)

        if not preview:
            await self.broadcast_or_release(tx, blocking)
            self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('publish'))
        else:
            # Preview only: release the outputs reserved while building the tx.
            await account.ledger.release_tx(tx)

        return tx
    async def collection_abandon(self, *args, **kwargs):
        """
        Abandon one of my collection claims.

        Usage:
            collection_abandon [<claim_id> | --claim_id=<claim_id>]
                               [<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
                               [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                               [--preview] [--blocking]

        Options:
            --claim_id=<claim_id>     : (str) claim_id of the claim to abandon
            --txid=<txid>             : (str) txid of the claim to abandon
            --nout=<nout>             : (int) nout of the claim to abandon
            --account_id=<account_id> : (str) id of the account to use
            --wallet_id=<wallet_id>   : (str) restrict operation to specific wallet
            --preview                 : (bool) do not broadcast the transaction
            --blocking                : (bool) wait until abandon is in mempool

        Returns: {Transaction}
        """
        # Abandoning a collection is mechanically identical to abandoning a
        # stream claim, so this simply delegates to stream_abandon.
        return await self.stream_abandon(*args, **kwargs)
async def collection_list(self, resolve_claims=0, account_id=None, wallet_id=None, page=None, page_size=None):
|
|
|
|
"""
|
|
|
|
List my collection claims.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
collection_list [--resolve_claims=<resolve_claims>] [<account_id> | --account_id=<account_id>]
|
|
|
|
[--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--resolve_claims=<resolve_claims> : (int) resolve every claim
|
|
|
|
--account_id=<account_id> : (str) id of the account to use
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
|
|
|
|
Returns: {Paginated[Output]}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
if account_id:
|
|
|
|
account = wallet.get_account_or_error(account_id)
|
|
|
|
collections = account.get_collections
|
|
|
|
collection_count = account.get_collection_count
|
|
|
|
else:
|
|
|
|
collections = partial(self.ledger.get_collections, wallet=wallet, accounts=wallet.accounts)
|
|
|
|
collection_count = partial(self.ledger.get_collection_count, wallet=wallet, accounts=wallet.accounts)
|
|
|
|
return paginate_rows(collections, collection_count, page, page_size, resolve_claims=resolve_claims)
|
|
|
|
|
|
|
|
async def collection_resolve(
|
|
|
|
self, claim_id=None, url=None, wallet_id=None, page=1, page_size=DEFAULT_PAGE_SIZE):
|
|
|
|
"""
|
|
|
|
Resolve claims in the collection.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
collection_resolve (--claim_id=<claim_id> | --url=<url>)
|
|
|
|
[--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--claim_id=<claim_id> : (str) claim id of the collection
|
|
|
|
--url=<url> : (str) url of the collection
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
|
|
|
|
Returns: {Paginated[Output]}
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
|
|
|
|
if claim_id:
|
|
|
|
txo = await self.ledger.get_claim_by_claim_id(wallet.accounts, claim_id)
|
|
|
|
if not isinstance(txo, Output) or not txo.is_claim:
|
|
|
|
raise Exception(f"Could not find collection with claim_id '{claim_id}'. ")
|
|
|
|
elif url:
|
|
|
|
txo = (await self.ledger.resolve(wallet.accounts, [url]))[url]
|
|
|
|
if not isinstance(txo, Output) or not txo.is_claim:
|
|
|
|
raise Exception(f"Could not find collection with url '{url}'. ")
|
|
|
|
else:
|
|
|
|
raise Exception(f"Missing argument claim_id or url. ")
|
|
|
|
|
|
|
|
page_num, page_size = abs(page), min(abs(page_size), 50)
|
|
|
|
items = await self.ledger.resolve_collection(txo, page_size * (page_num - 1), page_size)
|
|
|
|
total_items = len(txo.claim.collection.claims.ids)
|
|
|
|
|
|
|
|
return {
|
|
|
|
"items": items,
|
|
|
|
"total_pages": int((total_items + (page_size - 1)) / page_size),
|
|
|
|
"total_items": total_items,
|
|
|
|
"page_size": page_size,
|
|
|
|
"page": page
|
|
|
|
}
|
|
|
|
|
|
|
|
SUPPORT_DOC = """
|
|
|
|
Create, list and abandon all types of supports.
|
|
|
|
"""
|
|
|
|
|
|
|
|
    async def support_create(
            self, claim_id, amount, tip=False, account_id=None, wallet_id=None, funding_account_ids=None,
            preview=False, blocking=False):
        """
        Create a support or a tip for name claim.

        Usage:
            support_create (<claim_id> | --claim_id=<claim_id>) (<amount> | --amount=<amount>)
                           [--tip] [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                           [--preview] [--blocking] [--funding_account_ids=<funding_account_ids>...]

        Options:
            --claim_id=<claim_id>     : (str) claim_id of the claim to support
            --amount=<amount>         : (decimal) amount of support
            --tip                     : (bool) send support to claim owner, default: false.
            --account_id=<account_id> : (str) account to use for holding the transaction
            --wallet_id=<wallet_id>   : (str) restrict operation to specific wallet
            --funding_account_ids=<funding_account_ids>: (list) ids of accounts to fund this transaction
            --preview                 : (bool) do not broadcast the transaction
            --blocking                : (bool) wait until transaction is in mempool

        Returns: {Transaction}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
        funding_accounts = wallet.accounts.get_or_all(funding_account_ids)
        # NOTE(review): siblings call self.get_dewies_or_error(...) directly;
        # here it goes through self.ledger — confirm both resolve to the same helper.
        amount = self.ledger.get_dewies_or_error("amount", amount)
        claim = await self.ledger.get_claim_by_claim_id(wallet.accounts, claim_id)
        # Default destination: the claim owner's address (a tip). For a plain
        # support the output is redirected to one of our own addresses so the
        # funds stay spendable by this wallet.
        claim_address = claim.get_address(self.ledger.ledger)
        if not tip:
            account = wallet.accounts.get_or_default(account_id)
            claim_address = await account.receiving.get_or_create_usable_address()

        tx = await Transaction.support(
            claim.claim_name, claim_id, amount, claim_address, funding_accounts, funding_accounts[0]
        )

        if not preview:
            await self.broadcast_or_release(tx, blocking)
            # Record the support locally so it can be listed without a chain query.
            await self.storage.save_supports({claim_id: [{
                'txid': tx.id,
                'nout': tx.position,
                'address': claim_address,
                'claim_id': claim_id,
                'amount': dewies_to_lbc(amount)
            }]})
            self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('new_support'))
        else:
            # Preview only: release the outputs reserved while building the tx.
            await self.ledger.release_tx(tx)

        return tx
async def support_list(self, *args, received=False, sent=False, staked=False, **kwargs):
|
|
|
|
"""
|
|
|
|
List staked supports and sent/received tips.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
support_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
[--name=<name>...] [--claim_id=<claim_id>...]
|
|
|
|
[--received | --sent | --staked] [--is_spent]
|
|
|
|
[--page=<page>] [--page_size=<page_size>] [--no_totals]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--name=<name> : (str or list) claim name
|
|
|
|
--claim_id=<claim_id> : (str or list) claim id
|
|
|
|
--received : (bool) only show received (tips)
|
|
|
|
--sent : (bool) only show sent (tips)
|
|
|
|
--staked : (bool) only show my staked supports
|
|
|
|
--is_spent : (bool) show abandoned supports
|
|
|
|
--account_id=<account_id> : (str) id of the account to query
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
--no_totals : (bool) do not calculate the total number of pages and items in result set
|
|
|
|
(significant performance boost)
|
|
|
|
|
|
|
|
Returns: {Paginated[Output]}
|
|
|
|
"""
|
|
|
|
kwargs['type'] = 'support'
|
|
|
|
if 'is_spent' not in kwargs:
|
|
|
|
kwargs['is_not_spent'] = True
|
|
|
|
if received:
|
|
|
|
kwargs['is_not_my_input'] = True
|
|
|
|
kwargs['is_my_output'] = True
|
|
|
|
elif sent:
|
|
|
|
kwargs['is_my_input'] = True
|
|
|
|
kwargs['is_not_my_output'] = True
|
|
|
|
# spent for not my outputs is undetermined
|
|
|
|
kwargs.pop('is_spent', None)
|
|
|
|
kwargs.pop('is_not_spent', None)
|
|
|
|
elif staked:
|
|
|
|
kwargs['is_my_input'] = True
|
|
|
|
kwargs['is_my_output'] = True
|
|
|
|
return await self.txo_list(*args, **kwargs)
|
|
|
|
|
|
|
|
    async def support_abandon(
            self, claim_id=None, txid=None, nout=None, keep=None,
            account_id=None, wallet_id=None, preview=False, blocking=False):
        """
        Abandon supports, including tips, of a specific claim, optionally
        keeping some amount as supports.

        Usage:
            support_abandon [--claim_id=<claim_id>] [(--txid=<txid> --nout=<nout>)] [--keep=<keep>]
                            [--account_id=<account_id>] [--wallet_id=<wallet_id>]
                            [--preview] [--blocking]

        Options:
            --claim_id=<claim_id>     : (str) claim_id of the support to abandon
            --txid=<txid>             : (str) txid of the claim to abandon
            --nout=<nout>             : (int) nout of the claim to abandon
            --keep=<keep>             : (decimal) amount of lbc to keep as support
            --account_id=<account_id> : (str) id of the account to use
            --wallet_id=<wallet_id>   : (str) restrict operation to specific wallet
            --preview                 : (bool) do not broadcast the transaction
            --blocking                : (bool) wait until abandon is in mempool

        Returns: {Transaction}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        assert not wallet.is_locked, "Cannot spend funds with locked wallet, unlock first."
        if account_id:
            account = wallet.get_account_or_error(account_id)
            accounts = [account]
        else:
            account = wallet.default_account
            accounts = wallet.accounts

        # A txid:nout pair pinpoints one support; a claim_id selects all
        # supports on that claim. txid is display hex, reversed to the
        # internal hash byte order for the query.
        if txid is not None and nout is not None:
            supports = await self.ledger.get_supports(
                wallet=wallet, accounts=accounts, tx_hash=unhexlify(txid)[::-1], position=nout
            )
        elif claim_id is not None:
            supports = await self.ledger.get_supports(
                wallet=wallet, accounts=accounts, claim_id=claim_id
            )
        else:
            raise Exception('Must specify claim_id, or txid and nout')

        if not supports:
            raise Exception('No supports found for the specified claim_id or txid:nout')

        if keep is not None:
            keep = self.get_dewies_or_error('keep', keep)
        else:
            keep = 0

        # When keeping a partial amount, re-stake it as a fresh support output
        # on the same claim/pubkey; everything else flows back as change.
        outputs = []
        if keep > 0:
            outputs = [
                Output.pay_support_pubkey_hash(
                    keep, supports[0].claim_name, supports[0].claim_id, supports[0].pubkey_hash
                )
            ]

        # Spend every matched support into the (possibly empty) keep output.
        tx = await Transaction.create(
            [Input.spend(txo) for txo in supports], outputs, accounts, account
        )

        if not preview:
            await self.broadcast_or_release(tx, blocking)
            self.component_manager.loop.create_task(self.analytics_manager.send_claim_action('abandon'))
        else:
            # Preview only: release the outputs reserved while building the tx.
            await self.ledger.release_tx(tx)

        return tx
TRANSACTION_DOC = """
|
|
|
|
Transaction management.
|
|
|
|
"""
|
|
|
|
|
|
|
|
async def transaction_list(self, account_id=None, wallet_id=None, page=None, page_size=None):
|
|
|
|
"""
|
|
|
|
List transactions belonging to wallet
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
transaction_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
|
|
|
|
[--page=<page>] [--page_size=<page_size>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--account_id=<account_id> : (str) id of the account to query
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
|
|
|
|
--page=<page> : (int) page to return during paginating
|
|
|
|
--page_size=<page_size> : (int) number of items on page during pagination
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
(list) List of transactions
|
|
|
|
|
|
|
|
{
|
|
|
|
"claim_info": (list) claim info if in txn [{
|
|
|
|
"address": (str) address of claim,
|
|
|
|
"balance_delta": (float) bid amount,
|
|
|
|
"amount": (float) claim amount,
|
|
|
|
"claim_id": (str) claim id,
|
|
|
|
"claim_name": (str) claim name,
|
|
|
|
"nout": (int) nout
|
|
|
|
}],
|
|
|
|
"abandon_info": (list) abandon info if in txn [{
|
|
|
|
"address": (str) address of abandoned claim,
|
|
|
|
"balance_delta": (float) returned amount,
|
|
|
|
"amount": (float) claim amount,
|
|
|
|
"claim_id": (str) claim id,
|
|
|
|
"claim_name": (str) claim name,
|
|
|
|
"nout": (int) nout
|
|
|
|
}],
|
|
|
|
"confirmations": (int) number of confirmations for the txn,
|
|
|
|
"date": (str) date and time of txn,
|
|
|
|
"fee": (float) txn fee,
|
|
|
|
"support_info": (list) support info if in txn [{
|
|
|
|
"address": (str) address of support,
|
|
|
|
"balance_delta": (float) support amount,
|
|
|
|
"amount": (float) support amount,
|
|
|
|
"claim_id": (str) claim id,
|
|
|
|
"claim_name": (str) claim name,
|
|
|
|
"is_tip": (bool),
|
|
|
|
"nout": (int) nout
|
|
|
|
}],
|
|
|
|
"timestamp": (int) timestamp,
|
|
|
|
"txid": (str) txn id,
|
|
|
|
"update_info": (list) update info if in txn [{
|
|
|
|
"address": (str) address of claim,
|
|
|
|
"balance_delta": (float) credited/debited
|
|
|
|
"amount": (float) absolute amount,
|
|
|
|
"claim_id": (str) claim id,
|
|
|
|
"claim_name": (str) claim name,
|
|
|
|
"nout": (int) nout
|
|
|
|
}],
|
|
|
|
"value": (float) value of txn
|
|
|
|
}
|
|
|
|
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
if account_id:
|
|
|
|
account = wallet.get_account_or_error(account_id)
|
|
|
|
transactions = account.get_transaction_history
|
|
|
|
transaction_count = account.get_transaction_history_count
|
|
|
|
else:
|
|
|
|
transactions = partial(
|
|
|
|
self.ledger.get_transaction_history, wallet=wallet, accounts=wallet.accounts)
|
|
|
|
transaction_count = partial(
|
|
|
|
self.ledger.get_transaction_history_count, wallet=wallet, accounts=wallet.accounts)
|
|
|
|
return paginate_rows(transactions, transaction_count, page, page_size)
|
|
|
|
|
|
|
|
    async def transaction_search(self, txids):
        """
        Search for transaction(s) in the entire blockchain.

        Usage:
            transaction_search <txid>...

        Options:
            None

        Returns: {List[Transaction]}
        """
        # Thin pass-through: the lookup itself is implemented by the service
        # layer; the txid list is forwarded untouched.
        return await self.service.search_transactions(txids)
TXO_DOC = """
|
|
|
|
List and sum transaction outputs.
|
|
|
|
"""
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _constrain_txo_from_kwargs(
|
|
|
|
constraints, type=None, txid=None, # pylint: disable=redefined-builtin
|
|
|
|
claim_id=None, channel_id=None, name=None, reposted_claim_id=None,
|
|
|
|
is_spent=False, is_not_spent=False,
|
|
|
|
is_my_input_or_output=None, exclude_internal_transfers=False,
|
|
|
|
is_my_output=None, is_not_my_output=None,
|
|
|
|
is_my_input=None, is_not_my_input=None):
|
|
|
|
if is_spent:
|
|
|
|
constraints['is_spent'] = True
|
|
|
|
elif is_not_spent:
|
|
|
|
constraints['is_spent'] = False
|
|
|
|
constraints['exclude_internal_transfers'] = exclude_internal_transfers
|
|
|
|
if is_my_input_or_output is True:
|
|
|
|
constraints['is_my_input_or_output'] = True
|
|
|
|
else:
|
|
|
|
if is_my_input is True:
|
|
|
|
constraints['is_my_input'] = True
|
|
|
|
elif is_not_my_input is True:
|
|
|
|
constraints['is_my_input'] = False
|
|
|
|
if is_my_output is True:
|
|
|
|
constraints['is_my_output'] = True
|
|
|
|
elif is_not_my_output is True:
|
|
|
|
constraints['is_my_output'] = False
|
|
|
|
to_hash = lambda x: unhexlify(x)[::-1]
|
|
|
|
constrain_single_or_list(constraints, 'txo_type', type, lambda x: TXO_TYPES[x])
|
|
|
|
constrain_single_or_list(constraints, 'channel_hash', channel_id, to_hash)
|
|
|
|
constrain_single_or_list(constraints, 'claim_hash', claim_id, to_hash)
|
|
|
|
constrain_single_or_list(constraints, 'claim_name', name)
|
|
|
|
constrain_single_or_list(constraints, 'tx_hash', txid, to_hash)
|
|
|
|
constrain_single_or_list(constraints, 'reposted_claim_hash', reposted_claim_id, to_hash)
|
|
|
|
return constraints
|
|
|
|
|
|
|
|
    async def txo_list(
            self, account_id=None, wallet_id=None, page=None, page_size=None,
            resolve=False, order_by=None, no_totals=False, include_received_tips=False, **kwargs):
        """
        List my transaction outputs.

        Usage:
            txo_list [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...]
                     [--claim_id=<claim_id>...] [--channel_id=<channel_id>...] [--name=<name>...]
                     [--is_spent | --is_not_spent]
                     [--is_my_input_or_output |
                         [[--is_my_output | --is_not_my_output] [--is_my_input | --is_not_my_input]]
                     ]
                     [--exclude_internal_transfers] [--include_received_tips]
                     [--wallet_id=<wallet_id>] [--page=<page>] [--page_size=<page_size>]
                     [--resolve] [--order_by=<order_by>][--no_totals]

        Options:
            --type=<type>              : (str or list) claim type: stream, channel, support,
                                         purchase, collection, repost, other
            --txid=<txid>              : (str or list) transaction id of outputs
            --claim_id=<claim_id>      : (str or list) claim id
            --channel_id=<channel_id>  : (str or list) claims in this channel
            --name=<name>              : (str or list) claim name
            --is_spent                 : (bool) only show spent txos
            --is_not_spent             : (bool) only show not spent txos
            --is_my_input_or_output    : (bool) txos which have your inputs or your outputs,
                                                if using this flag the other related flags
                                                are ignored (--is_my_output, --is_my_input, etc)
            --is_my_output             : (bool) show outputs controlled by you
            --is_not_my_output         : (bool) show outputs not controlled by you
            --is_my_input              : (bool) show outputs created by you
            --is_not_my_input          : (bool) show outputs not created by you
            --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination:
                                                "--is_my_input --is_my_output --type=other"
                                                this allows to exclude "change" payments, this
                                                flag can be used in combination with any of the other flags
            --include_received_tips    : (bool) calculate the amount of tips recieved for claim outputs
            --account_id=<account_id>  : (str) id of the account to query
            --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
            --page=<page>              : (int) page to return during paginating
            --page_size=<page_size>    : (int) number of items on page during pagination
            --resolve                  : (bool) resolves each claim to provide additional metadata
            --order_by=<order_by>      : (str) field to order by: 'name', 'height', 'amount' and 'none'
            --no_totals                : (bool) do not calculate the total number of pages and items in result set
                                                (significant performance boost)

        Returns: {Paginated[Output]}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        if account_id:
            account = wallet.get_account_or_error(account_id)
            claims = account.get_txos
            claim_count = account.get_txo_count
        else:
            # NOTE(review): this method queries via self.service while sibling
            # listings (collection_list, transaction_list) use self.ledger —
            # confirm both paths are equivalent.
            claims = partial(self.service.get_txos, wallet=wallet, accounts=wallet.accounts)
            claim_count = partial(self.service.get_txo_count, wallet=wallet, accounts=wallet.accounts)
        # Base constraints always request the spent/mine annotations so the
        # returned outputs carry that metadata regardless of the filters used.
        constraints = {
            'resolve': resolve,
            'include_is_spent': True,
            'include_is_my_input': True,
            'include_is_my_output': True,
            'include_received_tips': include_received_tips
        }
        if order_by is not None:
            if order_by == 'name':
                # 'name' is an alias for the underlying txo.claim_name column.
                constraints['order_by'] = 'txo.claim_name'
            elif order_by in ('height', 'amount', 'none'):
                constraints['order_by'] = order_by
            else:
                raise ValueError(f"'{order_by}' is not a valid --order_by value.")
        # Fold the remaining keyword filters into the constraint dict (mutated in place).
        self._constrain_txo_from_kwargs(constraints, **kwargs)
        # Passing None as the count callable skips totals computation (--no_totals).
        return await paginate_rows(claims, None if no_totals else claim_count, page, page_size, **constraints)
    async def txo_spend(
            self, account_id=None, wallet_id=None, batch_size=500,
            include_full_tx=False, preview=False, blocking=False, **kwargs):
        """
        Spend transaction outputs, batching into multiple transactions as necessary.

        Usage:
            txo_spend [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...]
                      [--claim_id=<claim_id>...] [--channel_id=<channel_id>...] [--name=<name>...]
                      [--is_my_input | --is_not_my_input]
                      [--exclude_internal_transfers] [--wallet_id=<wallet_id>]
                      [--preview] [--blocking] [--batch_size=<batch_size>] [--include_full_tx]

        Options:
            --type=<type>              : (str or list) claim type: stream, channel, support,
                                         purchase, collection, repost, other
            --txid=<txid>              : (str or list) transaction id of outputs
            --claim_id=<claim_id>      : (str or list) claim id
            --channel_id=<channel_id>  : (str or list) claims in this channel
            --name=<name>              : (str or list) claim name
            --is_my_input              : (bool) show outputs created by you
            --is_not_my_input          : (bool) show outputs not created by you
            --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination:
                                                "--is_my_input --is_my_output --type=other"
                                                this allows to exclude "change" payments, this
                                                flag can be used in combination with any of the other flags
            --account_id=<account_id>  : (str) id of the account to query
            --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
            --preview                  : (bool) do not broadcast the transaction
            --blocking                 : (bool) wait until abandon is in mempool
            --batch_size=<batch_size>  : (int) number of txos to spend per transactions
            --include_full_tx          : (bool) include entire tx in output and not just the txid

        Returns: {List[Transaction]}
        """
        wallet = self.wallets.get_or_default(wallet_id)
        accounts = [wallet.get_account_or_error(account_id)] if account_id else wallet.accounts
        # Only unspent outputs we control are eligible; the caller's filters
        # narrow this further via the shared constraint builder.
        txos = await self.ledger.get_txos(
            wallet=wallet, accounts=accounts,
            **self._constrain_txo_from_kwargs({}, is_not_spent=True, is_my_output=True, **kwargs)
        )
        txs = []
        # Drain `txos` in batches of up to batch_size inputs per transaction.
        # range(min(len(txos), batch_size)) is evaluated once per batch, before
        # any pop(), so each comprehension consumes exactly one batch.
        while txos:
            txs.append(
                await Transaction.create(
                    [Input.spend(txos.pop()) for _ in range(min(len(txos), batch_size))],
                    [], accounts, accounts[0]
                )
            )
        if not preview:
            for tx in txs:
                await self.broadcast_or_release(tx, blocking)
        if include_full_tx:
            return txs
        # Default: return lightweight txid references instead of full transactions.
        return [{'txid': tx.id} for tx in txs]
async def txo_sum(self, account_id=None, wallet_id=None, **kwargs):
|
|
|
|
"""
|
|
|
|
Sum of transaction outputs.
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
txo_list [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...]
|
|
|
|
[--claim_id=<claim_id>...] [--name=<name>...]
|
|
|
|
[--is_spent] [--is_not_spent]
|
|
|
|
[--is_my_input_or_output |
|
|
|
|
[[--is_my_output | --is_not_my_output] [--is_my_input | --is_not_my_input]]
|
|
|
|
]
|
|
|
|
[--exclude_internal_transfers] [--wallet_id=<wallet_id>]
|
|
|
|
|
|
|
|
Options:
|
|
|
|
--type=<type> : (str or list) claim type: stream, channel, support,
|
|
|
|
purchase, collection, repost, other
|
|
|
|
--txid=<txid> : (str or list) transaction id of outputs
|
|
|
|
--claim_id=<claim_id> : (str or list) claim id
|
|
|
|
--name=<name> : (str or list) claim name
|
|
|
|
--is_spent : (bool) only show spent txos
|
|
|
|
--is_not_spent : (bool) only show not spent txos
|
|
|
|
--is_my_input_or_output : (bool) txos which have your inputs or your outputs,
|
|
|
|
if using this flag the other related flags
|
|
|
|
are ignored (--is_my_output, --is_my_input, etc)
|
|
|
|
--is_my_output : (bool) show outputs controlled by you
|
|
|
|
--is_not_my_output : (bool) show outputs not controlled by you
|
|
|
|
--is_my_input : (bool) show outputs created by you
|
|
|
|
--is_not_my_input : (bool) show outputs not created by you
|
|
|
|
--exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination:
|
|
|
|
"--is_my_input --is_my_output --type=other"
|
|
|
|
this allows to exclude "change" payments, this
|
|
|
|
flag can be used in combination with any of the other flags
|
|
|
|
--account_id=<account_id> : (str) id of the account to query
|
|
|
|
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
|
|
|
|
|
|
|
|
Returns: int
|
|
|
|
"""
|
2020-05-06 16:53:31 +02:00
|
|
|
wallet = self.wallets.get_or_default(wallet_id)
|
2020-05-01 15:33:58 +02:00
|
|
|
return self.ledger.get_txo_sum(
|
|
|
|
wallet=wallet, accounts=[wallet.get_account_or_error(account_id)] if account_id else wallet.accounts,
|
|
|
|
**self._constrain_txo_from_kwargs({}, **kwargs)
|
|
|
|
)
|
|
|
|
|
|
|
|
async def txo_plot(
        self, account_id=None, wallet_id=None,
        days_back=0, start_day=None, days_after=None, end_day=None, **kwargs):
    """
    Plot transaction output sum over days.

    Usage:
        txo_plot [--account_id=<account_id>] [--type=<type>...] [--txid=<txid>...]
                 [--claim_id=<claim_id>...] [--name=<name>...] [--is_spent] [--is_not_spent]
                 [--is_my_input_or_output |
                     [[--is_my_output | --is_not_my_output] [--is_my_input | --is_not_my_input]]
                 ]
                 [--exclude_internal_transfers] [--wallet_id=<wallet_id>]
                 [--days_back=<days_back> |
                     [--start_day=<start_day> [--days_after=<days_after> | --end_day=<end_day>]]
                 ]

    Options:
        --type=<type>                : (str or list) claim type: stream, channel, support,
                                       purchase, collection, repost, other
        --txid=<txid>                : (str or list) transaction id of outputs
        --claim_id=<claim_id>        : (str or list) claim id
        --name=<name>                : (str or list) claim name
        --is_spent                   : (bool) only show spent txos
        --is_not_spent               : (bool) only show not spent txos
        --is_my_input_or_output      : (bool) txos which have your inputs or your outputs,
                                       if using this flag the other related flags
                                       are ignored (--is_my_output, --is_my_input, etc)
        --is_my_output               : (bool) show outputs controlled by you
        --is_not_my_output           : (bool) show outputs not controlled by you
        --is_my_input                : (bool) show outputs created by you
        --is_not_my_input            : (bool) show outputs not created by you
        --exclude_internal_transfers: (bool) excludes any outputs that are exactly this combination:
                                      "--is_my_input --is_my_output --type=other"
                                       this allows to exclude "change" payments, this
                                       flag can be used in combination with any of the other flags
        --account_id=<account_id>    : (str) id of the account to query
        --wallet_id=<wallet_id>      : (str) restrict results to specific wallet
        --days_back=<days_back>      : (int) number of days back from today
                                       (not compatible with --start_day, --days_after, --end_day)
        --start_day=<start_day>      : (date) start on specific date (YYYY-MM-DD)
                                       (instead of --days_back)
        --days_after=<days_after>    : (int) end number of days after --start_day
                                       (instead of --end_day)
        --end_day=<end_day>          : (date) end on specific date (YYYY-MM-DD)
                                       (instead of --days_after)

    Returns: List[Dict]
    """
    wallet = self.wallets.get_or_default(wallet_id)
    # Query either the one named account or every account in the wallet.
    plot = await self.ledger.get_txo_plot(
        wallet=wallet, accounts=[wallet.get_account_or_error(account_id)] if account_id else wallet.accounts,
        days_back=days_back, start_day=start_day, days_after=days_after, end_day=end_day,
        **self._constrain_txo_from_kwargs({}, **kwargs)
    )
    # Convert raw dewies totals to human-readable LBC strings, in place.
    for row in plot:
        row['total'] = dewies_to_lbc(row['total'])
    return plot
UTXO_DOC = """
Unspent transaction management.
"""

async def utxo_list(self, *args, **kwargs):
    """
    List unspent transaction outputs

    Usage:
        utxo_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
                  [--page=<page>] [--page_size=<page_size>]

    Options:
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict results to specific wallet
        --page=<page>              : (int) page to return during paginating
        --page_size=<page_size>    : (int) number of items on page during pagination

    Returns: {Paginated[Output]}
    """
    # UTXOs are simply unspent txos of the spendable types; force those
    # constraints and delegate to txo_list.
    kwargs['type'] = ['other', 'purchase']
    kwargs['is_not_spent'] = True
    # FIX: txo_list is a coroutine function; without `await` this returned the
    # coroutine object itself instead of the paginated result.
    return await self.txo_list(*args, **kwargs)
async def utxo_release(self, account_id=None, wallet_id=None):
    """
    When spending a UTXO it is locally locked to prevent double spends;
    occasionally this can result in a UTXO being locked which ultimately
    did not get spent (failed to broadcast, spend transaction was not
    accepted by blockchain node, etc). This command releases the lock
    on all UTXOs in your account.

    Usage:
        utxo_release [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]

    Options:
        --account_id=<account_id>  : (str) id of the account to query
        --wallet_id=<wallet_id>    : (str) restrict operation to specific wallet

    Returns:
        None
    """
    wallet = self.wallets.get_or_default(wallet_id)
    # Restrict to a single account when one was named, otherwise release
    # locks across every account in the wallet.
    if account_id is not None:
        target_accounts = [wallet.get_account_or_error(account_id)]
    else:
        target_accounts = wallet.accounts
    for target in target_accounts:
        await target.release_all_outputs()
BLOB_DOC = """
Blob management.
"""

async def blob_get(self, blob_hash, timeout=None, read=False):
    """
    Download and return a blob

    Usage:
        blob_get (<blob_hash> | --blob_hash=<blob_hash>) [--timeout=<timeout>] [--read]

    Options:
        --blob_hash=<blob_hash>  : (str) blob hash of the blob to get
        --timeout=<timeout>      : (int) timeout in number of seconds

    Returns:
        (str) Success/Fail message or (dict) decoded data
    """
    # NOTE(review): `timeout` is accepted but not forwarded to download_blob
    # here — confirm whether that is intentional.
    blob = await download_blob(asyncio.get_event_loop(), self.conf, self.blob_manager, self.dht_node, blob_hash)
    if read:
        # Return the blob's decoded contents instead of a status message.
        with blob.reader_context() as handle:
            return handle.read().decode()
    elif isinstance(blob, BlobBuffer):
        # In-memory blob buffers are normally cleaned up by their reader; this
        # one was downloaded manually, so clear it explicitly.
        log.warning("manually downloaded blob buffer could have missed garbage collection, clearing it")
        blob.delete()
    return "Downloaded blob %s" % blob_hash
async def blob_delete(self, blob_hash):
    """
    Delete a blob

    Usage:
        blob_delete (<blob_hash> | --blob_hash=<blob_hash>)

    Options:
        --blob_hash=<blob_hash>  : (str) blob hash of the blob to delete

    Returns:
        (str) Success/fail message
    """
    # Reject empty or malformed hashes up front.
    if blob_hash and is_valid_blobhash(blob_hash):
        matching_streams = self.stream_manager.get_filtered_streams(sd_hash=blob_hash)
        if matching_streams:
            # The hash names a stream descriptor: remove the whole stream.
            await self.stream_manager.delete_stream(matching_streams[0])
        else:
            # Plain data blob: remove just that blob.
            await self.blob_manager.delete_blobs([blob_hash])
        return "Deleted %s" % blob_hash
    return f"Invalid blob hash to delete '{blob_hash}'"
PEER_DOC = """
DHT / Blob Exchange peer commands.
"""

async def peer_list(self, blob_hash, search_bottom_out_limit=None, page=None, page_size=None):
    """
    Get peers for blob hash

    Usage:
        peer_list (<blob_hash> | --blob_hash=<blob_hash>)
                  [<search_bottom_out_limit> | --search_bottom_out_limit=<search_bottom_out_limit>]
                  [--page=<page>] [--page_size=<page_size>]

    Options:
        --blob_hash=<blob_hash>                                  : (str) find available peers for this blob hash
        --search_bottom_out_limit=<search_bottom_out_limit>      : (int) the number of search probes in a row
                                                                   that don't find any new peers
                                                                   before giving up and returning
        --page=<page>                                            : (int) page to return during paginating
        --page_size=<page_size>                                  : (int) number of items on page during pagination

    Returns:
        (list) List of contact dictionaries {'address': <peer ip>, 'udp_port': <dht port>, 'tcp_port': <peer port>,
         'node_id': <peer node id>}
    """
    if not is_valid_blobhash(blob_hash):
        raise Exception("invalid blob hash")
    if search_bottom_out_limit is not None:
        search_bottom_out_limit = int(search_bottom_out_limit)
        if search_bottom_out_limit <= 0:
            raise Exception("invalid bottom out limit")
    else:
        search_bottom_out_limit = 4
    peers = []
    # FIX: the `loop=` kwarg to asyncio.Queue was deprecated in Python 3.8 and
    # removed in 3.10; the queue is bound to the running loop automatically.
    peer_q = asyncio.Queue()
    await self.dht_node._peers_for_value_producer(blob_hash, peer_q)
    # Drain everything the producer queued without blocking.
    while not peer_q.empty():
        peers.extend(peer_q.get_nowait())
    results = [
        {
            "node_id": hexlify(peer.node_id).decode(),
            "address": peer.address,
            "udp_port": peer.udp_port,
            "tcp_port": peer.tcp_port,
        }
        for peer in peers
    ]
    return paginate_list(results, page, page_size)
|
async def blob_announce(self, blob_hash=None, stream_hash=None, sd_hash=None):
    """
    Announce blobs to the DHT

    Usage:
        blob_announce (<blob_hash> | --blob_hash=<blob_hash>
                      | --stream_hash=<stream_hash> | --sd_hash=<sd_hash>)

    Options:
        --blob_hash=<blob_hash>        : (str) announce a blob, specified by blob_hash
        --stream_hash=<stream_hash>    : (str) announce all blobs associated with
                                         stream_hash
        --sd_hash=<sd_hash>            : (str) announce all blobs associated with
                                         sd_hash and the sd_hash itself

    Returns:
        (bool) true if successful
    """
    if blob_hash:
        # Announce exactly the one named blob.
        blob_hashes = [blob_hash]
    elif stream_hash or sd_hash:
        if sd_hash and stream_hash:
            raise Exception("either the sd hash or the stream hash should be provided, not both")
        if sd_hash:
            # Translate the sd hash into its stream so we can enumerate blobs.
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
        stream_blobs = await self.storage.get_blobs_for_stream(stream_hash, only_completed=True)
        blob_hashes = [b.blob_hash for b in stream_blobs if b.blob_hash is not None]
    else:
        raise Exception('single argument must be specified')
    await self.storage.should_single_announce_blobs(blob_hashes, immediate=True)
    return True
|
async def blob_list(self, uri=None, stream_hash=None, sd_hash=None, needed=None,
                    finished=None, page=None, page_size=None):
    """
    Returns blob hashes. If not given filters, returns all blobs known by the blob manager

    Usage:
        blob_list [--needed] [--finished] [<uri> | --uri=<uri>]
                  [<stream_hash> | --stream_hash=<stream_hash>]
                  [<sd_hash> | --sd_hash=<sd_hash>]
                  [--page=<page>] [--page_size=<page_size>]

    Options:
        --needed                     : (bool) only return needed blobs
        --finished                   : (bool) only return finished blobs
        --uri=<uri>                  : (str) filter blobs by stream in a uri
        --stream_hash=<stream_hash>  : (str) filter blobs by stream hash
        --sd_hash=<sd_hash>          : (str) filter blobs by sd hash
        --page=<page>                : (int) page to return during paginating
        --page_size=<page_size>      : (int) number of items on page during pagination

    Returns:
        (list) List of blob hashes
    """
    if uri or stream_hash or sd_hash:
        # Normalize whichever filter was given into a (sd_hash, stream_hash) pair.
        if uri:
            metadata = (await self.resolve([], uri))[uri]
            sd_hash = utils.get_sd_hash(metadata)
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
        elif stream_hash:
            sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
        elif sd_hash:
            # Round-trip through the stream to canonicalize the sd hash.
            stream_hash = await self.storage.get_stream_hash_for_sd_hash(sd_hash)
            sd_hash = await self.storage.get_sd_blob_hash_for_stream(stream_hash)
        # The sd blob itself is listed first, then the stream's data blobs
        # (the final stream blob entry is a terminator and is excluded).
        if sd_hash:
            blobs = [sd_hash]
        else:
            blobs = []
        if stream_hash:
            blobs.extend([b.blob_hash for b in (await self.storage.get_blobs_for_stream(stream_hash))[:-1]])
    else:
        # No filter: list every completed blob known to the blob manager.
        blobs = list(self.blob_manager.completed_blob_hashes)
    if needed:
        blobs = [blob_hash for blob_hash in blobs if not self.blob_manager.is_blob_verified(blob_hash)]
    if finished:
        blobs = [blob_hash for blob_hash in blobs if self.blob_manager.is_blob_verified(blob_hash)]
    return paginate_list(blobs, page, page_size)
|
async def file_reflect(self, **kwargs):
    """
    Reflect all the blobs in a file matching the filter criteria

    Usage:
        file_reflect [--sd_hash=<sd_hash>] [--file_name=<file_name>]
                     [--stream_hash=<stream_hash>] [--rowid=<rowid>]
                     [--reflector=<reflector>]

    Options:
        --sd_hash=<sd_hash>          : (str) get file with matching sd hash
        --file_name=<file_name>      : (str) get file with matching file name in the
                                       downloads folder
        --stream_hash=<stream_hash>  : (str) get file with matching stream hash
        --rowid=<rowid>              : (int) get file with matching row id
        --reflector=<reflector>      : (str) reflector server, ip address or url
                                       by default choose a server from the config

    Returns:
        (list) list of blobs reflected
    """
    # NOTE(review): 'server'/'port' are read but not removed from kwargs, so
    # they are also forwarded to get_filtered_streams below — confirm the
    # filter tolerates the extra keys.
    server, port = kwargs.get('server'), kwargs.get('port')
    if server and port:
        port = int(port)
    else:
        # No explicit reflector: pick one at random from configuration.
        server, port = random.choice(self.conf.reflector_servers)
    # Reflect every matching stream concurrently.
    reflected = await asyncio.gather(*[
        self.stream_manager.reflect_stream(stream, server, port)
        for stream in self.stream_manager.get_filtered_streams(**kwargs)
    ])
    # Flatten the per-stream lists into one list of reflected blob hashes.
    total = []
    for reflected_for_stream in reflected:
        total.extend(reflected_for_stream)
    return total
|
async def peer_ping(self, node_id, address, port):
    """
    Send a kademlia ping to the specified peer. If address and port are provided the peer is directly pinged,
    if not provided the peer is located first.

    Usage:
        peer_ping (<node_id> | --node_id=<node_id>) (<address> | --address=<address>) (<port> | --port=<port>)

    Options:
        None

    Returns:
        (str) pong, or {'error': <error message>} if an error is encountered
    """
    # All three identifiers are required to construct a directly-pingable peer.
    if node_id and address and port:
        target = make_kademlia_peer(unhexlify(node_id), address, udp_port=int(port))
        try:
            return await self.dht_node.protocol.get_rpc_peer(target).ping()
        except asyncio.TimeoutError:
            return {'error': 'timeout'}
    return {'error': 'peer not found'}
|
async def routing_table_get(self):
    """
    Get DHT routing information

    Usage:
        routing_table_get

    Options:
        None

    Returns:
        (dict) dictionary containing routing and peer information
        {
            "buckets": {
                <bucket index>: [
                    {
                        "address": (str) peer address,
                        "udp_port": (int) peer udp port,
                        "tcp_port": (int) peer tcp port,
                        "node_id": (str) peer node id,
                    }
                ]
            },
            "node_id": (str) the local dht node id
        }
    """
    # One entry per k-bucket, keyed by the bucket's index, each holding the
    # serialized peers currently stored in that bucket.
    buckets = {}
    for index, bucket in enumerate(self.dht_node.protocol.routing_table.buckets):
        buckets[index] = [
            {
                "address": bucket_peer.address,
                "udp_port": bucket_peer.udp_port,
                "tcp_port": bucket_peer.tcp_port,
                "node_id": hexlify(bucket_peer.node_id).decode(),
            }
            for bucket_peer in bucket.peers
        ]
    return {
        'buckets': buckets,
        'node_id': hexlify(self.dht_node.protocol.node_id).decode(),
    }
|
TRACEMALLOC_DOC = """
Controls and queries tracemalloc memory tracing tools for troubleshooting.
"""

async def tracemalloc_enable(self):
    """
    Enable tracemalloc memory tracing

    Usage:
        tracemalloc_enable

    Options:
        None

    Returns:
        (bool) is it tracing?
    """
    # Starting is a no-op when tracing is already active.
    if not tracemalloc.is_tracing():
        tracemalloc.start()
    return tracemalloc.is_tracing()
|
async def tracemalloc_disable(self):
    """
    Disable tracemalloc memory tracing

    Usage:
        tracemalloc_disable

    Options:
        None

    Returns:
        (bool) is it tracing?
    """
    tracemalloc.stop()
    # Report the post-stop state; False on success.
    still_tracing = tracemalloc.is_tracing()
    return still_tracing
|
async def tracemalloc_top(self, items: int = 10):
    """
    Show most common objects, the place that created them and their size.

    Usage:
        tracemalloc_top [(<items> | --items=<items>)]

    Options:
        --items=<items>               : (int) maximum items to return, from the most common

    Returns:
        (dict) dictionary containing most common objects in memory
        {
            "line": (str) filename and line number where it was created,
            "code": (str) code that created it,
            "size": (int) size in bytes, for each "memory block",
            "count" (int) number of memory blocks
        }
    """
    if not tracemalloc.is_tracing():
        raise Exception("Enable tracemalloc first! See 'tracemalloc set' command.")
    stats = tracemalloc.take_snapshot().filter_traces((
        tracemalloc.Filter(False, "<frozen importlib._bootstrap>"),
        tracemalloc.Filter(False, "<unknown>"),
        # tracemalloc and linecache here use some memory, but thats not relevant
        tracemalloc.Filter(False, tracemalloc.__file__),
        tracemalloc.Filter(False, linecache.__file__),
    )).statistics('lineno', True)
    results = []
    for stat in stats:
        frame = stat.traceback[0]
        # Keep only the last two path components to keep the output readable.
        filename = os.sep.join(frame.filename.split(os.sep)[-2:])
        line = linecache.getline(frame.filename, frame.lineno).strip()
        results.append({
            # FIX: the computed filename was previously discarded and the
            # literal "(unknown)" reported, contradicting the docstring.
            "line": f"{filename}:{frame.lineno}",
            "code": line,
            "size": stat.size,
            "count": stat.count
        })
        if len(results) == items:
            break
    return results
|
COMMENT_DOC = """
View, create and abandon comments.
"""

async def comment_list(self, claim_id, parent_id=None, page=1, page_size=50,
                       include_replies=True, is_channel_signature_valid=False,
                       hidden=False, visible=False):
    """
    List comments associated with a claim.

    Usage:
        comment_list    (<claim_id> | --claim_id=<claim_id>)
                        [(--page=<page> --page_size=<page_size>)]
                        [--parent_id=<parent_id>] [--include_replies]
                        [--is_channel_signature_valid]
                        [--visible | --hidden]

    Options:
        --claim_id=<claim_id>           : (str) The claim on which the comment will be made on
        --parent_id=<parent_id>         : (str) CommentId of a specific thread you'd like to see
        --page=<page>                   : (int) The page you'd like to see in the comment list.
        --page_size=<page_size>         : (int) The amount of comments that you'd like to retrieve
        --include_replies               : (bool) Whether or not you want to include replies in list
        --is_channel_signature_valid    : (bool) Only include comments with valid signatures.
                                          [Warning: Paginated total size will not change, even
                                           if list reduces]
        --visible                       : (bool) Select only Visible Comments
        --hidden                        : (bool) Select only Hidden Comments

    Returns:
        (dict)  Containing the list, and information about the paginated content:
        {
            "page": "Page number of the current items.",
            "page_size": "Number of items to show on a page.",
            "total_pages": "Total number of pages.",
            "total_items": "Total number of items.",
            "items": "A List of dict objects representing comments."
            [
                {
                    "comment":      (str) The actual string as inputted by the user,
                    "comment_id":   (str) The Comment's unique identifier,
                    "channel_name": (str) Name of the channel this was posted under, prepended with a '@',
                    "channel_id":   (str) The Channel Claim ID that this comment was posted under,
                    "signature":    (str) The signature of the comment,
                    "channel_url":  (str) Channel's URI in the ClaimTrie,
                    "parent_id":    (str) Comment this is replying to, (None) if this is the root,
                    "timestamp":    (int) The time at which comment was entered into the server at, in nanoseconds.
                },
                ...
            ]
        }
    """
    # XOR: exactly one of --hidden / --visible selects the hidden-comments
    # endpoint; neither or both falls through to the normal listing.
    if hidden ^ visible:
        result = await comment_client.post(
            self.conf.comment_server,
            'get_claim_hidden_comments',
            claim_id=claim_id,
            hidden=hidden,
            page=page,
            page_size=page_size
        )
    else:
        result = await comment_client.post(
            self.conf.comment_server,
            'get_claim_comments',
            claim_id=claim_id,
            parent_id=parent_id,
            page=page,
            page_size=page_size,
            top_level=not include_replies
        )
    # Verify each comment's channel signature against the resolved channel.
    for comment in result.get('items', []):
        channel_url = comment.get('channel_url')
        if not channel_url:
            # Anonymous comment: nothing to verify.
            continue
        resolve_response = await self.resolve([], [channel_url])
        if isinstance(resolve_response[channel_url], Output):
            comment['is_channel_signature_valid'] = comment_client.is_comment_signed_by_channel(
                comment, resolve_response[channel_url]
            )
        else:
            # Channel failed to resolve; signature cannot be validated.
            comment['is_channel_signature_valid'] = False
    if is_channel_signature_valid:
        # Filter in place; note total_items/total_pages still reflect the
        # unfiltered server-side counts (see docstring warning).
        result['items'] = [
            c for c in result.get('items', []) if c.get('is_channel_signature_valid', False)
        ]
    return result
|
async def comment_create(self, comment, claim_id=None, parent_id=None, channel_account_id=None,
                         channel_name=None, channel_id=None, wallet_id=None):
    """
    Create and associate a comment with a claim using your channel identity.

    Usage:
        comment_create  (<comment> | --comment=<comment>)
                        (<claim_id> | --claim_id=<claim_id> | --parent_id=<parent_id>)
                        (--channel_id=<channel_id> | --channel_name=<channel_name>)
                        [--channel_account_id=<channel_account_id>...] [--wallet_id=<wallet_id>]

    Options:
        --comment=<comment>                         : (str) Comment to be made, should be at most 2000 characters.
        --claim_id=<claim_id>                       : (str) The ID of the claim to comment on
        --parent_id=<parent_id>                     : (str) The ID of a comment to make a response to
        --channel_id=<channel_id>                   : (str) The ID of the channel you want to post under
        --channel_name=<channel_name>               : (str) The channel you want to post as, prepend with a '@'
        --channel_account_id=<channel_account_id>   : (str) one or more account ids for accounts to look in
                                                      for channel certificates, defaults to all accounts
        --wallet_id=<wallet_id>                     : (str) restrict operation to specific wallet

    Returns:
        (dict) Comment object if successfully made, (None) otherwise
        {
            "comment":      (str) The actual string as inputted by the user,
            "comment_id":   (str) The Comment's unique identifier,
            "channel_name": (str) Name of the channel this was posted under, prepended with a '@',
            "channel_id":   (str) The Channel Claim ID that this comment was posted under,
            "signature":    (str) The signature of the comment,
            "signing_ts":   (str) The timestamp used to sign the comment,
            "channel_url":  (str) Channel's URI in the ClaimTrie,
            "parent_id":    (str) Comment this is replying to, (None) if this is the root,
            "timestamp":    (int) The time at which comment was entered into the server at, in nanoseconds.
        }
    """
    wallet = self.wallets.get_or_default(wallet_id)
    # Locate the signing channel; raises if not found in the given accounts.
    channel = await self.get_channel_or_error(
        wallet, channel_account_id, channel_id, channel_name, for_signing=True
    )

    comment_body = {
        'comment': comment.strip(),
        'claim_id': claim_id,
        'parent_id': parent_id,
        'channel_id': channel.claim_id,
        'channel_name': channel.claim_name,
    }
    # Attach signature and signing timestamp in place.
    comment_client.sign_comment(comment_body, channel)

    response = await comment_client.post(self.conf.comment_server, 'create_comment', comment_body)
    # Round-trip check: verify the server echoed back a correctly signed comment.
    response.update({
        'is_claim_signature_valid': comment_client.is_comment_signed_by_channel(response, channel)
    })
    return response
|
async def comment_update(self, comment, comment_id, wallet_id=None):
    """
    Edit a comment published as one of your channels.

    Usage:
        comment_update (<comment> | --comment=<comment>)
                       (<comment_id> | --comment_id=<comment_id>)
                       [--wallet_id=<wallet_id>]

    Options:
        --comment=<comment>         : (str) New comment replacing the old one
        --comment_id=<comment_id>   : (str) Hash identifying the comment to edit
        --wallet_id=<wallet_id>     : (str) restrict operation to specific wallet

    Returns:
        (dict) Comment object if edit was successful, (None) otherwise
        {
            "comment":      (str) The actual string as inputted by the user,
            "comment_id":   (str) The Comment's unique identifier,
            "channel_name": (str) Name of the channel this was posted under, prepended with a '@',
            "channel_id":   (str) The Channel Claim ID that this comment was posted under,
            "signature":    (str) The signature of the comment,
            "signing_ts":   (str) Timestamp used to sign the most recent signature,
            "channel_url":  (str) Channel's URI in the ClaimTrie,
            "parent_id":    (str) Comment this is replying to, (None) if this is the root,
            "timestamp":    (int) The time at which comment was entered into the server at, in nanoseconds.
        }
    """
    # FIX: the --wallet_id Options line was missing its closing '>'
    # ("<wallet_id :"), which breaks docopt option parsing for this command.
    # Ask the comment server which channel originally signed this comment.
    channel = await comment_client.post(
        self.conf.comment_server,
        'get_channel_from_comment_id',
        comment_id=comment_id
    )
    if 'error' in channel:
        raise ValueError(channel['error'])

    wallet = self.wallets.get_or_default(wallet_id)
    # Resolve the signing channel locally; raises if it isn't in this wallet.
    channel_claim = await self.get_channel_or_error(wallet, [], **channel)
    edited_comment = {
        'comment_id': comment_id,
        'comment': comment,
        'channel_id': channel_claim.claim_id,
        'channel_name': channel_claim.claim_name
    }
    # Re-sign the edited body before submitting it.
    comment_client.sign_comment(edited_comment, channel_claim)
    return await comment_client.post(
        self.conf.comment_server, 'edit_comment', edited_comment
    )
|
async def comment_abandon(self, comment_id, wallet_id=None):
    """
    Abandon a comment published under your channel identity.

    Usage:
        comment_abandon  (<comment_id> | --comment_id=<comment_id>) [--wallet_id=<wallet_id>]

    Options:
        --comment_id=<comment_id>   : (str) The ID of the comment to be abandoned.
        --wallet_id=<wallet_id>     : (str) restrict operation to specific wallet

    Returns:
        (dict) Object with the `comment_id` passed in as the key, and a flag indicating if it was abandoned
        {
            <comment_id> (str): {
                "abandoned": (bool)
            }
        }
    """
    # FIX: the --wallet_id Options line was missing its closing '>'
    # ("<wallet_id :"), which breaks docopt option parsing for this command.
    wallet = self.wallets.get_or_default(wallet_id)
    abandon_comment_body = {'comment_id': comment_id}
    channel = await comment_client.post(
        self.conf.comment_server, 'get_channel_from_comment_id', comment_id=comment_id
    )
    if 'error' in channel:
        return {comment_id: {'abandoned': False}}
    channel = await self.get_channel_or_none(wallet, None, **channel)
    # FIX: get_channel_or_none can return None (channel not in this wallet);
    # previously that raised AttributeError below instead of reporting failure.
    if channel is None:
        return {comment_id: {'abandoned': False}}
    abandon_comment_body.update({
        'channel_id': channel.claim_id,
        'channel_name': channel.claim_name,
    })
    comment_client.sign_comment(abandon_comment_body, channel, abandon=True)
    return await comment_client.post(self.conf.comment_server, 'abandon_comment', abandon_comment_body)
|
async def comment_hide(self, comment_ids: Union[str, list], wallet_id=None):
    """
    Hide a comment published to a claim you control.

    Usage:
        comment_hide  <comment_ids>... [--wallet_id=<wallet_id>]

    Options:
        --comment_ids=<comment_ids>  : (str, list) one or more comment_id to hide.
        --wallet_id=<wallet_id>      : (str) restrict operation to specific wallet

    Returns:
        (dict) keyed by comment_id, containing success info
        '<comment_id>': {
            "hidden": (bool) flag indicating if comment_id was hidden
        }
    """
    wallet = self.wallets.get_or_default(wallet_id)

    # Accept a single id as shorthand for a one-element list.
    if isinstance(comment_ids, str):
        comment_ids = [comment_ids]

    comments = await comment_client.post(
        self.conf.comment_server, 'get_comments_by_id', comment_ids=comment_ids
    )
    # Resolve each commented-on claim once, even when several comments share it.
    claim_ids = {comment['claim_id'] for comment in comments}
    claims = {cid: await self.ledger.get_claim_by_claim_id(wallet.accounts, claim_id=cid) for cid in claim_ids}
    pieces = []
    for comment in comments:
        claim = claims.get(comment['claim_id'])
        if claim:
            # Sign each hide request with the channel that owns the claim.
            # NOTE(review): get_channel_or_none may return None here, which
            # would be passed to sign_comment below — confirm sign_comment
            # tolerates a None channel.
            channel = await self.get_channel_or_none(
                wallet,
                account_ids=[],
                channel_id=claim.channel.claim_id,
                channel_name=claim.channel.claim_name,
                for_signing=True
            )
            piece = {'comment_id': comment['comment_id']}
            comment_client.sign_comment(piece, channel, abandon=True)
            pieces.append(piece)
    return await comment_client.post(self.conf.comment_server, 'hide_comments', pieces=pieces)
|
class Client(API):
    """
    JSON-RPC client proxy for the API.

    Any attribute that matches a method defined on API is intercepted by
    __getattribute__ and forwarded over HTTP to a remote daemon instead of
    executing locally.
    """

    def __init__(self, service: Service, url):
        # url: HTTP endpoint of the remote daemon's JSON-RPC server.
        super().__init__(service)
        self.url = url

    async def send(self, method, **kwargs):
        """
        POST a JSON-RPC style request {'method': ..., 'params': ...} to the
        daemon and return the decoded 'result' (or 'error') field.

        Returns None when the daemon is unreachable or the response contains
        neither 'result' nor 'error', or cannot be decoded.
        """
        # A fresh session per call keeps the client stateless.
        async with aiohttp.ClientSession() as session:
            try:
                message = {'method': method, 'params': kwargs}
                async with session.get(self.url, json=message) as resp:
                    try:
                        data = await resp.json()
                        if 'result' in data:
                            return data['result']
                        elif 'error' in data:
                            return data['error']
                    except Exception as e:
                        # Malformed/non-JSON body; log and fall through to None.
                        log.exception('Could not process response from server:', exc_info=e)
            except aiohttp.ClientConnectionError:
                print("Could not connect to daemon. Are you sure it's running?")

    def __getattribute__(self, name):
        # Any name that exists on the API base class is proxied: return a
        # callable that sends the request remotely with `name` as the method.
        # Everything else (self.url, self.send, ...) resolves normally.
        if name in dir(API):
            return partial(object.__getattribute__(self, 'send'), name)
        return object.__getattribute__(self, name)