import os
import re
import json
import inspect
import tempfile
import asyncio
import time
from docopt import docopt
from binascii import unhexlify
from textwrap import indent

from lbry.testcase import CommandTestCase
from lbry.extras.cli import set_kwargs, get_argument_parser
from lbry.extras.daemon.daemon import (
    Daemon, jsonrpc_dumps_pretty, encode_pagination_doc
)
from tests.integration.other.test_comment_commands import MockedCommentServer
from lbry.extras.daemon.json_response_encoder import (
    encode_tx_doc, encode_txo_doc, encode_account_doc, encode_file_doc,
    encode_wallet_doc
)
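
# Generates docs/api.json: every daemon API command is exercised against an
# integration-test daemon, the request/response pairs are recorded as
# examples, and argument/return docs are parsed from the method docstrings.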


RETURN_DOCS = {
    'Account': encode_account_doc(),
    'Wallet': encode_wallet_doc(),
    'File': encode_file_doc(),
    'Transaction': encode_tx_doc(),
    'Output': encode_txo_doc(),
    'Address': 'an address in base58',
    'Dict': 'glorious data in dictionary',
}


class ExampleRecorder:

    def __init__(self, test):
        self.test = test
        self.examples = {}
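
    # Each call parses the CLI-style arguments with the daemon's argument
    # parser and docopt, invokes the matching jsonrpc_* method, and records
    # equivalent curl/lbrynet/python invocations plus the JSON output.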
    async def __call__(self, title, *command):
        parser = get_argument_parser()
        args, command_args = parser.parse_known_args(command)

        api_method_name = args.api_method_name
        parsed = docopt(args.doc, command_args)
        kwargs = set_kwargs(parsed)
        for k, v in kwargs.items():
            if v and isinstance(v, str) and (v[0], v[-1]) == ('"', '"'):
                kwargs[k] = v[1:-1]
        params = json.dumps({"method": api_method_name, "params": kwargs})

        method = getattr(self.test.daemon, f'jsonrpc_{api_method_name}')
        result = method(**kwargs)
        if asyncio.iscoroutine(result):
            result = await result
        output = jsonrpc_dumps_pretty(result, ledger=self.test.daemon.ledger)
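        # Illustrative shape of the entry recorded below (hypothetical values
        # for a `lbrynet status` call):
        #   {'title': 'Get status', 'lbrynet': 'lbrynet status',
        #    'curl': "curl -d'{...}' http://localhost:5279/",
        #    'python': 'requests.post("http://localhost:5279", json={...}).json()',
        #    'output': '{...}'}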
        self.examples.setdefault(api_method_name, []).append({
            'title': title,
            'curl': f"curl -d'{params}' http://localhost:5279/",
            'lbrynet': 'lbrynet ' + ' '.join(command),
            'python': f'requests.post("http://localhost:5279", json={params}).json()',
            'output': output.strip()
        })
        return json.loads(output)['result']


class Examples(CommandTestCase):

    async def asyncSetUp(self):
        await super().asyncSetUp()
        self.daemon.conf.comment_server = 'http://localhost:2903/api'
        self.comment_server = MockedCommentServer(2903)
        await self.comment_server.start()
        self.addCleanup(self.comment_server.stop)
        self.recorder = ExampleRecorder(self)

    async def play(self):
        r = self.recorder

        # general sdk

        await r(
            'Get status',
            'status'
        )
        await r(
            'Get version',
            'version'
        )

        # settings

        await r(
            'Get settings',
            'settings', 'get'
        )
        await r(
            'Set settings',
            'settings', 'set', '"tcp_port"', '99'
        )

        # preferences

        await r(
            'Set preference',
            'preference', 'set', '"theme"', '"dark"'
        )
        await r(
            'Get preferences',
            'preference', 'get'
        )

        # wallets

        await r(
            'List your wallets',
            'wallet', 'list'
        )

        # accounts

        await r(
            'List your accounts',
            'account', 'list'
        )
        account = await r(
            'Create an account',
            'account', 'create', '"generated account"'
        )
        await r(
            'Remove an account',
            'account', 'remove', account['id']
        )
        await r(
            'Add an account from seed',
            'account', 'add', '"new account"', f"--seed=\"{account['seed']}\""
        )
        await r(
            'Modify maximum number of times a change address can be reused',
            'account', 'set', account['id'], '--change_max_uses=10'
        )

        # addresses

        await r(
            'List addresses in default account',
            'address', 'list'
        )
        an_address = await r(
            'Get an unused address',
            'address', 'unused'
        )
        address_list_by_id = await r(
            'List addresses in specified account',
            'address', 'list', f"--account_id=\"{account['id']}\""
        )
        await r(
            'Check if address is mine',
            'address', 'is_mine', an_address
        )

        # sends/funds

        transfer = await r(
            'Transfer 2 LBC from default account to specific account',
            'account', 'fund', f"--to_account=\"{account['id']}\"", '--amount=2.0', '--broadcast'
        )
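        # Broadcast pattern used throughout: wait for the wallet to register
        # the transaction, mine a block, then wait for the confirmed update.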
        await self.on_transaction_dict(transfer)
        await self.generate(1)
        await self.on_transaction_dict(transfer)

        await r(
            'Get default account balance',
            'account', 'balance'
        )
        txlist = await r(
            'List your transactions',
            'transaction', 'list'
        )
        await r(
            'Get balance for specific account by id',
            'account', 'balance', f"\"{account['id']}\""
        )
        spread_transaction = await r(
            'Spread LBC between multiple addresses',
            'account', 'fund', f"--to_account=\"{account['id']}\"",
            f"--from_account=\"{account['id']}\"", '--amount=1.5',
            '--outputs=2', '--broadcast'
        )
        await self.on_transaction_dict(spread_transaction)
        await self.generate(1)
        await self.on_transaction_dict(spread_transaction)

        await r(
            'Transfer all LBC to a specified account',
            'account', 'fund', f"--from_account=\"{account['id']}\"", '--everything', '--broadcast'
        )

        # channels

        channel = await r(
            'Create a channel claim without metadata',
            'channel', 'create', '@channel', '1.0'
        )
        channel_id = self.get_claim_id(channel)
        await self.on_transaction_dict(channel)
        await self.generate(1)
        await self.on_transaction_dict(channel)

        await r(
            'List your channel claims',
            'channel', 'list'
        )
        await r(
            'Paginate your channel claims',
            'channel', 'list', '--page=1', '--page_size=20'
        )
        channel = await r(
            'Update a channel claim',
            'channel', 'update', channel_id, '--title="New Channel"'
        )
        await self.on_transaction_dict(channel)
        await self.generate(1)
        await self.on_transaction_dict(channel)

        big_channel = await r(
            'Create a channel claim with all metadata',
            'channel', 'create', '@bigchannel', '1.0',
            '--title="Big Channel"', '--description="A channel with lots of videos."',
            '--email="creator@smallmedia.com"', '--tags=music', '--tags=art',
            '--languages=pt-BR', '--languages=uk', '--locations=BR', '--locations=UA::Kyiv',
            '--website_url="http://smallmedia.com"', '--thumbnail_url="http://smallmedia.com/logo.jpg"',
            '--cover_url="http://smallmedia.com/logo.jpg"'
        )
        await self.on_transaction_dict(big_channel)
        await self.generate(1)
        await self.on_transaction_dict(big_channel)
        await self.daemon.jsonrpc_channel_abandon(self.get_claim_id(big_channel))
        await self.generate(1)

        # stream claims

        with tempfile.NamedTemporaryFile() as file:
            file.write(b'hello world')
            file.flush()
            stream = await r(
                'Create a stream claim without metadata',
                'stream', 'create', 'astream', '1.0', file.name
            )
            await self.on_transaction_dict(stream)
            await self.generate(1)
            await self.on_transaction_dict(stream)

        stream_id = self.get_claim_id(stream)
        stream_name = stream['outputs'][0]['name']
        stream = await r(
            'Update a stream claim to add channel',
            'stream', 'update', stream_id,
            f'--channel_id="{channel_id}"'
        )
        await self.on_transaction_dict(stream)
        await self.generate(1)
        await self.on_transaction_dict(stream)

        await r(
            'List all your claims',
            'claim', 'list'
        )
        await r(
            'Paginate your claims',
            'claim', 'list', '--page=1', '--page_size=20'
        )
        await r(
            'List all your stream claims',
            'stream', 'list'
        )
        await r(
            'Paginate your stream claims',
            'stream', 'list', '--page=1', '--page_size=20'
        )
        await r(
            'Search for all claims in channel',
            'claim', 'search', '--channel=@channel'
        )
        await r(
            'Search for claims matching a name',
            'claim', 'search', f'--name="{stream_name}"'
        )

        with tempfile.NamedTemporaryFile(suffix='.png') as file:
            file.write(unhexlify(
                b'89504e470d0a1a0a0000000d49484452000000050000000708020000004fc'
                b'510b9000000097048597300000b1300000b1301009a9c1800000015494441'
                b'5408d763fcffff3f031260624005d4e603004c45030b5286e9ea000000004'
                b'9454e44ae426082'
            ))
            file.flush()
            big_stream = await r(
                'Create an image stream claim with all metadata and fee',
                'stream', 'create', 'blank-image', '1.0', file.name,
                '--tags=blank', '--tags=art', '--languages=en', '--locations=US:NH:Manchester',
                '--fee_currency=LBC', '--fee_amount=0.3',
                '--title="Blank Image"', '--description="A blank PNG that is 5x7."', '--author=Picasso',
                '--license="Public Domain"', '--license_url=http://public-domain.org',
                '--thumbnail_url="http://smallmedia.com/thumbnail.jpg"', f'--release_time={int(time.time())}',
                f'--channel_id="{channel_id}"'
            )
            await self.on_transaction_dict(big_stream)
            await self.generate(1)
            await self.on_transaction_dict(big_stream)
            await self.daemon.jsonrpc_stream_abandon(self.get_claim_id(big_stream))
            await self.generate(1)

        # comments

        comment = await r(
            'Post a comment as your channel',
            'comment', 'create', '--comment="Thank you Based God"',
            '--channel_name=@channel', f'--claim_id={stream_id}'
        )
        reply = await r(
            'Use the parent_id param to make replies',
            'comment', 'create',
            '--comment="I have photographic evidence confirming Sasquatch exists"',
            '--channel_name=@channel', f'--parent_id={comment["comment_id"]}',
            f'--claim_id={stream_id}'
        )
        await r(
            'List all comments on a claim',
            'comment', 'list', stream_id, '--include_replies'
        )
        await r(
            'List a comment thread replying to a top level comment',
            'comment', 'list', stream_id,
            f'--parent_id={comment["comment_id"]}'
        )
        await r(
            'Edit the contents of a comment',
            'comment', 'update', 'Where there was once sasquatch, there is not',
            f'--comment_id={comment["comment_id"]}'
        )
        await self.daemon.jsonrpc_comment_abandon(reply['comment_id'])

        # files

        file_list_result = (await r(
            'List local files',
            'file', 'list'
        ))['items']
        file_uri = f"{file_list_result[0]['claim_name']}#{file_list_result[0]['claim_id']}"
        await r(
            'Resolve a claim',
            'resolve', file_uri
        )
        await r(
            'List files matching a parameter',
            'file', 'list', f"--claim_id=\"{file_list_result[0]['claim_id']}\""
        )
        await r(
            'Delete a file',
            'file', 'delete', f"--claim_id=\"{file_list_result[0]['claim_id']}\""
        )
        await r(
            'Get a file',
            'get', file_uri
        )
        await r(
            'Save a file to the downloads directory',
            'file', 'save', f"--sd_hash=\"{file_list_result[0]['sd_hash']}\""
        )

        # blobs

        bloblist = await r(
            'List your local blobs',
            'blob', 'list'
        )
        await r(
            'Delete a blob',
            'blob', 'delete', f"{bloblist['items'][0]}"
        )

        # abandon all the things

        await r(
            'Abandon a comment',
            'comment', 'abandon', comment['comment_id']
        )
        abandon_stream = await r(
            'Abandon a stream claim',
            'stream', 'abandon', stream_id
        )
        await self.on_transaction_dict(abandon_stream)
        await self.generate(1)
        await self.on_transaction_dict(abandon_stream)

        abandon_channel = await r(
            'Abandon a channel claim',
            'channel', 'abandon', channel_id
        )
        await self.on_transaction_dict(abandon_channel)
        await self.generate(1)
        await self.on_transaction_dict(abandon_channel)

        with tempfile.NamedTemporaryFile() as file:
            file.write(b'hello world')
            file.flush()
            stream = await r(
                'Publish a file',
                'publish', 'a-new-stream', '--bid=1.0', f'--file_path={file.name}'
            )
            await self.on_transaction_dict(stream)
            await self.generate(1)
            await self.on_transaction_dict(stream)
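

# Run the Examples test case and collect the recorded examples; any error
# during the run is printed and aborts documentation generation.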
def get_examples():
    player = Examples('play')
    result = player.run()
    if result.errors:
        for error in result.errors:
            print(error[1])
        raise Exception('See above for errors while running the examples.')
    return player.recorder.examples
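

# The parsers below expect each jsonrpc_* docstring to follow this layout
# (an illustrative sketch, not a real command):
#
#     Do the thing.
#
#     Usage:
#         thing_do (<value> | --value=<value>) [--fast]
#
#     Options:
#         --value=<value> : (str) the value to use
#         --fast          : (bool) do it quickly
#
#     Returns:
#         {Transaction}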
SECTIONS = re.compile("(.*?)Usage:(.*?)Options:(.*?)Returns:(.*)", re.DOTALL)
REQUIRED_OPTIONS = re.compile(r"\(<(.*?)>.*?\)")
ARGUMENT_NAME = re.compile("--([^=]+)")
ARGUMENT_TYPE = re.compile(r"\s*\((.*?)\)(.*)")
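

# A "Returns:" section is either literal text or a placeholder such as
# "{Transaction}", "{List[Output]}" or "{Paginated[File]}"; placeholders are
# expanded into the matching schema from RETURN_DOCS.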
def get_return_def(returns):
    result = returns.strip()
    if (result[0], result[-1]) == ('{', '}'):
        obj_type = result[1:-1]
        if '[' in obj_type:
            sub_type = obj_type[obj_type.index('[')+1:-1]
            obj_type = obj_type[:obj_type.index('[')]
            if obj_type == 'Paginated':
                obj_def = encode_pagination_doc(RETURN_DOCS[sub_type])
            elif obj_type == 'List':
                obj_def = [RETURN_DOCS[sub_type]]
            else:
                raise NameError(f'Unknown return type: {obj_type}')
        else:
            obj_def = RETURN_DOCS[obj_type]
        return indent(json.dumps(obj_def, indent=4), ' '*12)
    return result


def get_api(name, examples):
    obj = Daemon.callable_methods[name]
    docstr = inspect.getdoc(obj).strip()

    try:
        description, usage, options, returns = SECTIONS.search(docstr).groups()
    except AttributeError:
        raise ValueError(f"Doc string format error for {obj.__name__}.")

    required = re.findall(REQUIRED_OPTIONS, usage)
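
    # Option descriptions may wrap onto continuation lines; those lines are
    # appended to the previous argument's description and joined below.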
    arguments = []
    for line in options.splitlines():
        line = line.strip()
        if not line:
            continue
        if line.startswith('--'):
            arg, desc = line.split(':', 1)
            arg_name = ARGUMENT_NAME.search(arg).group(1)
            arg_type, arg_desc = ARGUMENT_TYPE.search(desc).groups()
            arguments.append({
                'name': arg_name.strip(),
                'type': arg_type.strip(),
                'description': [arg_desc.strip()],
                'is_required': arg_name in required
            })
        elif line == 'None':
            continue
        else:
            arguments[-1]['description'].append(line.strip())

    for arg in arguments:
        arg['description'] = ' '.join(arg['description'])

    return {
        'name': name,
        'description': description.strip(),
        'arguments': arguments,
        'returns': get_return_def(returns),
        'examples': examples
    }
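

# Group commands as the daemon reports them; deprecated commands (those
# carrying a 'replaced_by' entry) are skipped.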
def write_api(f):
    examples = get_examples()
    api_definitions = Daemon.get_api_definitions()
    apis = {
        'main': {
            'doc': 'Ungrouped commands.',
            'commands': []
        }
    }
    for group_name, group_doc in api_definitions['groups'].items():
        apis[group_name] = {
            'doc': group_doc,
            'commands': []
        }
    for method_name, command in api_definitions['commands'].items():
        if 'replaced_by' in command:
            continue
        apis[command['group'] or 'main']['commands'].append(get_api(
            method_name,
            examples.get(method_name, [])
        ))
    json.dump(apis, f, indent=4)
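

# When run directly, write docs/api.json under the parent of this script's
# directory (the repository root).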
if __name__ == '__main__':
    parent = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    api_file = os.path.join(parent, 'docs', 'api.json')
    with open(api_file, 'w+') as f:
        write_api(f)
|