Merge pull request #2757 from lbryio/resolve_for_local_list

added `--resolve` to the local `claim_list`/`stream_list`/`channel_list` commands; it resolves each local claim and returns additional metadata
Lex Berezhny 2020-02-01 18:15:35 -05:00 committed by GitHub
commit 9b4417be87
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 283 additions and 153 deletions
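
For illustration, a minimal sketch of calling the updated command through the daemon's JSON-RPC API. The endpoint address (the default local lbrynet API at http://localhost:5279) is an assumption here, not part of this change; the `short_url` and `meta` fields mentioned in the comments are the ones exercised by the integration tests below.

```python
import aiohttp
import asyncio

async def claim_list_resolved():
    # Assumes a locally running lbrynet daemon on its default JSON-RPC port (5279).
    async with aiohttp.ClientSession() as session:
        async with session.post('http://localhost:5279', json={
            'method': 'claim_list',
            'params': {'resolve': True, 'page': 1, 'page_size': 20},
        }) as response:
            result = (await response.json())['result']
    # With resolve=True each item carries resolved metadata, e.g. 'short_url'
    # and a populated 'meta' section ('is_controlling', or 'error' when the
    # claim cannot be resolved yet).
    return result['items']

if __name__ == '__main__':
    print(asyncio.run(claim_list_resolved()))
```

Without `resolve`, the same call returns only the locally stored transaction outputs.
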

File diff suppressed because one or more lines are too long

@@ -2144,7 +2144,8 @@ class Daemon(metaclass=JSONRPCServerType):
"""
@requires(WALLET_COMPONENT)
def jsonrpc_claim_list(self, claim_type=None, account_id=None, wallet_id=None, page=None, page_size=None):
def jsonrpc_claim_list(
self, claim_type=None, account_id=None, wallet_id=None, page=None, page_size=None, resolve=False):
"""
List my stream and channel claims.
@@ -2152,6 +2153,7 @@ class Daemon(metaclass=JSONRPCServerType):
claim_list [--claim_type=<claim_type>]
[--account_id=<account_id>] [--wallet_id=<wallet_id>]
[--page=<page>] [--page_size=<page_size>]
[--resolve]
Options:
--claim_type=<claim_type> : (str) claim type: channel, stream, repost, collection
@@ -2159,6 +2161,7 @@ class Daemon(metaclass=JSONRPCServerType):
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
--page=<page> : (int) page to return during paginating
--page_size=<page_size> : (int) number of items on page during pagination
--resolve : (bool) resolves each claim to provide additional metadata
Returns: {Paginated[Output]}
"""
@@ -2170,7 +2173,7 @@ class Daemon(metaclass=JSONRPCServerType):
else:
claims = partial(self.ledger.get_claims, wallet=wallet, accounts=wallet.accounts)
claim_count = partial(self.ledger.get_claim_count, wallet=wallet, accounts=wallet.accounts)
return paginate_rows(claims, claim_count, page, page_size, claim_type=claim_type)
return paginate_rows(claims, claim_count, page, page_size, claim_type=claim_type, resolve=resolve)
@requires(WALLET_COMPONENT)
async def jsonrpc_claim_search(self, **kwargs):
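
The only change to the RPC surface is threading `resolve` through `paginate_rows`, which forwards extra keyword arguments to the row getters; the count getter simply discards it (see the `get_txo_count` change below). A rough sketch of that forwarding pattern, not the exact lbry-sdk helper:

```python
from math import ceil

async def paginate_rows_sketch(get_rows, get_row_count, page=None, page_size=None, **constraints):
    # Extra kwargs such as resolve=True travel untouched into the row getter
    # (here, Ledger.get_claims); the count getter receives the same constraints
    # and drops the ones it cannot turn into SQL.
    page, page_size = max(1, page or 1), max(1, page_size or 20)
    total = await get_row_count(**constraints)
    items = await get_rows(offset=(page - 1) * page_size, limit=page_size, **constraints)
    return {
        'items': items,
        'page': page,
        'page_size': page_size,
        'total_items': total,
        'total_pages': ceil(total / page_size),
    }
```
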
@@ -2673,19 +2676,20 @@ class Daemon(metaclass=JSONRPCServerType):
return tx
@requires(WALLET_COMPONENT)
def jsonrpc_channel_list(self, account_id=None, wallet_id=None, page=None, page_size=None):
def jsonrpc_channel_list(self, account_id=None, wallet_id=None, page=None, page_size=None, resolve=False):
"""
List my channel claims.
Usage:
channel_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
[--page=<page>] [--page_size=<page_size>]
[--page=<page>] [--page_size=<page_size>] [--resolve]
Options:
--account_id=<account_id> : (str) id of the account to use
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
--page=<page> : (int) page to return during paginating
--page_size=<page_size> : (int) number of items on page during pagination
--resolve : (bool) resolves each channel to provide additional metadata
Returns: {Paginated[Output]}
"""
@@ -2697,7 +2701,7 @@ class Daemon(metaclass=JSONRPCServerType):
else:
channels = partial(self.ledger.get_channels, wallet=wallet, accounts=wallet.accounts)
channel_count = partial(self.ledger.get_channel_count, wallet=wallet, accounts=wallet.accounts)
return paginate_rows(channels, channel_count, page, page_size)
return paginate_rows(channels, channel_count, page, page_size, resolve=resolve)
@requires(WALLET_COMPONENT)
async def jsonrpc_channel_export(self, channel_id=None, channel_name=None, account_id=None, wallet_id=None):
@@ -3400,19 +3404,20 @@ class Daemon(metaclass=JSONRPCServerType):
return tx
@requires(WALLET_COMPONENT)
def jsonrpc_stream_list(self, account_id=None, wallet_id=None, page=None, page_size=None):
def jsonrpc_stream_list(self, account_id=None, wallet_id=None, page=None, page_size=None, resolve=False):
"""
List my stream claims.
Usage:
stream_list [<account_id> | --account_id=<account_id>] [--wallet_id=<wallet_id>]
[--page=<page>] [--page_size=<page_size>]
[--page=<page>] [--page_size=<page_size>] [--resolve]
Options:
--account_id=<account_id> : (str) id of the account to query
--wallet_id=<wallet_id> : (str) restrict results to specific wallet
--page=<page> : (int) page to return during paginating
--page_size=<page_size> : (int) number of items on page during pagination
--resolve : (bool) resolves each stream to provide additional metadata
Returns: {Paginated[Output]}
"""
@@ -3424,7 +3429,7 @@ class Daemon(metaclass=JSONRPCServerType):
else:
streams = partial(self.ledger.get_streams, wallet=wallet, accounts=wallet.accounts)
stream_count = partial(self.ledger.get_stream_count, wallet=wallet, accounts=wallet.accounts)
return paginate_rows(streams, stream_count, page, page_size)
return paginate_rows(streams, stream_count, page, page_size, resolve=resolve)
@requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
DHT_COMPONENT, DATABASE_COMPONENT)

@@ -555,6 +555,12 @@ class CommandTestCase(IntegrationTestCase):
async def claim_list(self, *args, **kwargs):
return (await self.out(self.daemon.jsonrpc_claim_list(*args, **kwargs)))['items']
async def stream_list(self, *args, **kwargs):
return (await self.out(self.daemon.jsonrpc_stream_list(*args, **kwargs)))['items']
async def channel_list(self, *args, **kwargs):
return (await self.out(self.daemon.jsonrpc_channel_list(*args, **kwargs)))['items']
@staticmethod
def get_claim_id(tx):
return tx['outputs'][0]['claim_id']

@@ -631,6 +631,7 @@ class Database(SQLiteMixin):
return txos
async def get_txo_count(self, **constraints):
constraints.pop('resolve', None)
constraints.pop('wallet', None)
constraints.pop('offset', None)
constraints.pop('limit', None)
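
`get_txo_count` turns its keyword constraints into a SQL count query, so keywords that are not column filters (`resolve`, `wallet`, and the paging arguments) have to be stripped first. A hypothetical illustration of that filtering step; this is not the actual lbry-sdk query builder:

```python
# Hypothetical illustration: keep only keyword constraints that map to columns
# before rendering them into a WHERE clause for the COUNT query.
NON_SQL_KEYWORDS = ('resolve', 'wallet', 'offset', 'limit')

def build_count_where(constraints):
    columns = {k: v for k, v in constraints.items() if k not in NON_SQL_KEYWORDS}
    clause = ' AND '.join(f'{name} = :{name}' for name in columns)
    return (f'WHERE {clause}' if clause else ''), columns
```
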

@@ -732,20 +732,42 @@ class Ledger(metaclass=LedgerRegistry):
def get_purchase_count(self, resolve=False, **constraints):
return self.db.get_purchase_count(**constraints)
def get_claims(self, **constraints):
return self.db.get_claims(**constraints)
async def _resolve_for_local_results(self, accounts, txos):
results = []
response = await self.resolve(accounts, [txo.permanent_url for txo in txos])
for txo in txos:
resolved = response[txo.permanent_url]
if isinstance(resolved, Output):
results.append(resolved)
else:
if isinstance(resolved, dict) and 'error' in resolved:
txo.meta['error'] = resolved['error']
results.append(txo)
return results
async def get_claims(self, resolve=False, **constraints):
claims = await self.db.get_claims(**constraints)
if resolve:
return await self._resolve_for_local_results(constraints.get('accounts', []), claims)
return claims
def get_claim_count(self, **constraints):
return self.db.get_claim_count(**constraints)
def get_streams(self, **constraints):
return self.db.get_streams(**constraints)
async def get_streams(self, resolve=False, **constraints):
streams = await self.db.get_streams(**constraints)
if resolve:
return await self._resolve_for_local_results(constraints.get('accounts', []), streams)
return streams
def get_stream_count(self, **constraints):
return self.db.get_stream_count(**constraints)
def get_channels(self, **constraints):
return self.db.get_channels(**constraints)
async def get_channels(self, resolve=False, **constraints):
channels = await self.db.get_channels(**constraints)
if resolve:
return await self._resolve_for_local_results(constraints.get('accounts', []), channels)
return channels
def get_channel_count(self, **constraints):
return self.db.get_channel_count(**constraints)
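
From a caller's point of view, the new resolve path either swaps a local Output for its resolved counterpart or keeps the local one with `meta['error']` set. A minimal sketch, assuming a ledger with at least one account, as in the tests below:

```python
async def split_by_resolution(ledger, accounts):
    # Resolvable claims come back as resolved Outputs (extra metadata populated);
    # claims that do not resolve yet (e.g. still unconfirmed) keep the local
    # Output with the resolution error recorded under meta['error'].
    channels = await ledger.get_channels(resolve=True, accounts=accounts)
    resolved = [txo for txo in channels if 'error' not in txo.meta]
    pending = [txo for txo in channels if 'error' in txo.meta]
    return resolved, pending
```
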

@@ -382,6 +382,78 @@ class ClaimSearchCommand(ClaimTestCase):
await self.assertFindsClaims([], text='cloud')
class ClaimCommands(ClaimTestCase):
async def test_claim_stream_channel_list_with_resolve(self):
await self.channel_create()
await self.stream_create()
r = await self.claim_list()
self.assertNotIn('short_url', r[0])
self.assertNotIn('short_url', r[1])
self.assertNotIn('short_url', (await self.stream_list())[0])
self.assertNotIn('short_url', (await self.channel_list())[0])
r = await self.claim_list(resolve=True)
self.assertIn('short_url', r[0])
self.assertIn('short_url', r[1])
self.assertIn('short_url', (await self.stream_list(resolve=True))[0])
self.assertIn('short_url', (await self.channel_list(resolve=True))[0])
# unconfirmed channel won't resolve
channel_tx = await self.daemon.jsonrpc_channel_create('@foo', '1.0')
await self.ledger.wait(channel_tx)
r = await self.claim_list(resolve=True)
self.assertEqual('not_found', r[0]['meta']['error']['name'])
self.assertTrue(r[1]['meta']['is_controlling'])
r = await self.channel_list(resolve=True)
self.assertEqual('not_found', r[0]['meta']['error']['name'])
self.assertTrue(r[1]['meta']['is_controlling'])
# confirm it
await self.generate(1)
await self.ledger.wait(channel_tx, self.blockchain.block_expected)
# all channel claims resolve
r = await self.claim_list(resolve=True)
self.assertTrue(r[0]['meta']['is_controlling'])
self.assertTrue(r[1]['meta']['is_controlling'])
r = await self.channel_list(resolve=True)
self.assertTrue(r[0]['meta']['is_controlling'])
self.assertTrue(r[1]['meta']['is_controlling'])
# unconfirmed stream won't resolve
stream_tx = await self.daemon.jsonrpc_stream_create(
'foo', '1.0', file_path=self.create_upload_file(data=b'hi')
)
await self.ledger.wait(stream_tx)
r = await self.claim_list(resolve=True)
self.assertEqual('not_found', r[0]['meta']['error']['name'])
self.assertTrue(r[1]['meta']['is_controlling'])
r = await self.stream_list(resolve=True)
self.assertEqual('not_found', r[0]['meta']['error']['name'])
self.assertTrue(r[1]['meta']['is_controlling'])
# confirm it
await self.generate(1)
await self.ledger.wait(stream_tx, self.blockchain.block_expected)
# all claims resolve
r = await self.claim_list(resolve=True)
self.assertTrue(r[0]['meta']['is_controlling'])
self.assertTrue(r[1]['meta']['is_controlling'])
self.assertTrue(r[2]['meta']['is_controlling'])
self.assertTrue(r[3]['meta']['is_controlling'])
r = await self.stream_list(resolve=True)
self.assertTrue(r[0]['meta']['is_controlling'])
self.assertTrue(r[1]['meta']['is_controlling'])
r = await self.channel_list(resolve=True)
self.assertTrue(r[0]['meta']['is_controlling'])
self.assertTrue(r[1]['meta']['is_controlling'])
class ChannelCommands(CommandTestCase):
async def test_create_channel_names(self):