minor refactor and cleanup, generated api.json

Lex Berezhny 2019-10-26 00:55:30 -04:00
parent ca6202fc19
commit d227660d6c
3 changed files with 127 additions and 133 deletions

File diff suppressed because one or more lines are too long (the generated api.json named in the commit message)


@@ -1,4 +1,3 @@
-import math
 import os
 import asyncio
 import logging
@@ -150,7 +149,7 @@ def paginate_list(items: List, page: Optional[int], page_size: Optional[int]):
     offset = page_size * (page - 1)
     subitems = []
     if offset <= total_items:
-        subitems = items[offset:page_size]
+        subitems = items[offset:offset+page_size]
     return {
         "items": subitems,
         "total_pages": int((total_items + (page_size - 1)) / page_size),
@@ -1775,7 +1774,7 @@ class Daemon(metaclass=JSONRPCServerType):
                       [--claim_name=<claim_name>] [--blobs_in_stream=<blobs_in_stream>]
                       [--blobs_remaining=<blobs_remaining>] [--sort=<sort_by>]
                       [--comparison=<comparison>] [--full_status=<full_status>] [--reverse]
-                      [--page=<page> --page_size=<page_size>]
+                      [--page=<page>] [--page_size=<page_size>]

         Options:
             --sd_hash=<sd_hash>                    : (str) get file with matching sd hash
@@ -1797,29 +1796,14 @@ class Daemon(metaclass=JSONRPCServerType):
             --page=<page>                          : (int) page to return during paginating
             --page_size=<page_size>                : (int) number of items on page during pagination

-        Returns: {Paginated[Output]}
+        Returns: {Paginated[File]}
         """
         sort = sort or 'rowid'
         comparison = comparison or 'eq'
-        items = self.stream_manager.get_filtered_streams(
-            sort, reverse, comparison, **kwargs
-        )
-        total_items = len(items)
-        page = page or 1
-        page_size = page_size if page_size else len(items) or 1
-        offset = page_size * (page-1)
-        items = items[offset:offset+page_size]
-        return {
-            'total_items': total_items,
-            'total_pages': math.ceil(total_items / page_size),
-            'page': page,
-            'page_size': page_size,
-            'items': items
-        }
+        return paginate_list(
+            self.stream_manager.get_filtered_streams(sort, reverse, comparison, **kwargs), page, page_size
+        )

     @requires(STREAM_MANAGER_COMPONENT)
     async def jsonrpc_file_set_status(self, status, **kwargs):
         """


@@ -9,12 +9,10 @@ from lbry.blob_exchange.downloader import BlobDownloader


 class FileCommands(CommandTestCase):
-    VERBOSITY = logging.WARN

     async def create_streams_in_range(self, *args, **kwargs):
         self.stream_claim_ids = []
         for i in range(*args, **kwargs):
-            t = await self.stream_create(f'Stream_{i}', '0.00001', b'This is a stream')
+            t = await self.stream_create(f'Stream_{i}', '0.00001')
             self.stream_claim_ids.append(t['outputs'][0]['claim_id'])

     async def test_file_management(self):
@@ -55,7 +53,7 @@ class FileCommands(CommandTestCase):
         self.assertTrue(
             await self.daemon.jsonrpc_file_delete(claim_name=claim_name, delete_from_download_dir=True)
         )
-        self.assertEqual(len(self.daemon.jsonrpc_file_list()), 0)
+        self.assertItemCount(self.daemon.jsonrpc_file_list(), 0)
         self.assertFalse(os.path.isfile(full_path))

     async def test_publish_with_illegal_chars(self):
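assertItemCount is a CommandTestCase helper whose definition is not part of this diff; given the paginated dict that file_list now returns, one plausible sketch (hypothetical, not from the repo) is:

    def assertItemCount(self, result, count):
        # Hypothetical sketch; the real helper lives in CommandTestCase.
        self.assertEqual(count, len(result['items']))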
@@ -68,7 +66,7 @@ class FileCommands(CommandTestCase):
         prefix, suffix = 'derp?', '.ext.'
         san_prefix, san_suffix = 'derp', '.ext'
         tx = await self.stream_create(claim_name, '0.01', prefix=prefix, suffix=suffix)
-        stream = self.daemon.jsonrpc_file_list()[0]
+        stream = self.daemon.jsonrpc_file_list()['items'][0]
         claim_id = self.get_claim_id(tx)

         # Assert that file list and source contains the local unsanitized name, but suggested name is sanitized
@@ -88,7 +86,7 @@ class FileCommands(CommandTestCase):

         # Re-download deleted file and assert that the file name is sanitized
         full_path = (await self.daemon.jsonrpc_get('lbry://' + claim_name, save_file=True)).full_path
         stream_file_name = os.path.basename(full_path)
-        stream = self.daemon.jsonrpc_file_list()[0]
+        stream = self.daemon.jsonrpc_file_list()['items'][0]
         file_list_name = stream.file_name
         suggested_file_name = stream.descriptor.suggested_file_name
@@ -111,7 +109,7 @@ class FileCommands(CommandTestCase):
         san_prefix, san_suffix = 'derpyderp', '.ext'
         tx = await self.stream_update(claim_id, data=b'amazing content', prefix=prefix, suffix=suffix)
         full_path = (await self.daemon.jsonrpc_get('lbry://' + claim_name, save_file=True)).full_path
-        updated_stream = self.daemon.jsonrpc_file_list()[0]
+        updated_stream = self.daemon.jsonrpc_file_list()['items'][0]
         stream_file_name = os.path.basename(full_path)
         source_file_name = tx['outputs'][0]['value']['source']['name']
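The test hunks above all make the same mechanical adjustment: jsonrpc_file_list now returns a paginated dict rather than a plain list, so callers unwrap 'items' first. In sketch form, using the dict shape shown in the daemon diff (values hypothetical):

    result = {'items': ['stream-0'], 'total_pages': 1, 'total_items': 1, 'page': 1, 'page_size': 1}
    first_stream = result['items'][0]   # previously: jsonrpc_file_list()[0] indexed the list directly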