forked from LBRYCommunity/lbry-sdk
Make FileManager.stop() async because SourceManager.stop() is now async.
This commit is contained in:
parent 6892c58735
commit 224896686f

4 changed files with 7 additions and 8 deletions
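A minimal sketch of the pattern this commit applies, using simplified stand-in classes rather than the SDK's real implementations (only the class and attribute names mirror the diff below): once SourceManager.stop() is a coroutine, FileManager.stop() has to become a coroutine too, and every call site must await it.

import asyncio


class SourceManager:
    # stand-in: stop() is now a coroutine that performs async cleanup
    async def stop(self):
        await asyncio.sleep(0)  # e.g. cancel tasks, close connections


class FileManager:
    def __init__(self, source_managers: dict):
        self.source_managers = source_managers
        self.started = asyncio.Event()

    async def stop(self):
        # awaiting each manager guarantees its cleanup has finished
        # before stop() returns, instead of leaving it half-done
        for manager in self.source_managers.values():
            await manager.stop()
        self.started.clear()


async def main():
    file_manager = FileManager({'stream': SourceManager()})
    await file_manager.stop()  # callers now await, as in the hunks below


asyncio.run(main())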
@@ -374,7 +374,7 @@ class FileManagerComponent(Component):
         log.info('Done setting up file manager')

     async def stop(self):
-        self.file_manager.stop()
+        await self.file_manager.stop()


 class BackgroundDownloaderComponent(Component):
@@ -50,10 +50,10 @@ class FileManager:
             await manager.started.wait()
         self.started.set()

-    def stop(self):
+    async def stop(self):
         for manager in self.source_managers.values():
             # fixme: pop or not?
-            manager.stop()
+            await manager.stop()
         self.started.clear()

     @cache_concurrent
@@ -354,7 +354,7 @@ class FileCommands(CommandTestCase):
         await self.daemon.jsonrpc_get('lbry://foo')
         with open(original_path, 'wb') as handle:
             handle.write(b'some other stuff was there instead')
-        self.daemon.file_manager.stop()
+        await self.daemon.file_manager.stop()
         await self.daemon.file_manager.start()
         await asyncio.wait_for(self.wait_files_to_complete(), timeout=5)  # if this hangs, file didn't get set completed
         # check that internal state got through up to the file list API
@@ -382,8 +382,7 @@ class FileCommands(CommandTestCase):
         resp = await self.out(self.daemon.jsonrpc_get('lbry://foo', timeout=2))
         self.assertNotIn('error', resp)
         self.assertTrue(os.path.isfile(path))
-        self.daemon.file_manager.stop()
-        await asyncio.sleep(0.01)  # FIXME: this sleep should not be needed
+        await self.daemon.file_manager.stop()
         self.assertFalse(os.path.isfile(path))

     async def test_incomplete_downloads_retry(self):
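This hunk also drops the asyncio.sleep(0.01) workaround, presumably because an awaited stop() now runs its cleanup to completion before returning, so the test no longer needs an extra trip through the event loop before asserting. An illustrative sketch of that ordering guarantee (FakeFileManager is hypothetical, not SDK code):

import asyncio


class FakeFileManager:
    # hypothetical stand-in: cleanup used to be scheduled, now it is awaited
    def __init__(self):
        self.file_on_disk = True

    async def stop(self):
        self.file_on_disk = False  # cleanup completes before stop() returns


async def main():
    manager = FakeFileManager()
    await manager.stop()
    # safe to assert immediately; no asyncio.sleep() needed to yield first
    assert manager.file_on_disk is False


asyncio.run(main())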
@@ -478,7 +477,7 @@ class FileCommands(CommandTestCase):

         # restart the daemon and make sure the fee is still there

-        self.daemon.file_manager.stop()
+        await self.daemon.file_manager.stop()
         await self.daemon.file_manager.start()
         self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
         self.assertEqual((await self.daemon.jsonrpc_file_list())['items'][0].content_fee.raw, raw_content_fee)
@@ -21,7 +21,7 @@ def get_random_bytes(n: int) -> bytes:

 class RangeRequests(CommandTestCase):
     async def _restart_stream_manager(self):
-        self.daemon.file_manager.stop()
+        await self.daemon.file_manager.stop()
         await self.daemon.file_manager.start()
         return