forked from LBRYCommunity/lbry-sdk

force close open streaming requests and api calls on shutdown

parent cbe689ea7f
commit 1f7feafb67

2 changed files with 9 additions and 5 deletions
|
@ -439,6 +439,7 @@ class Daemon(metaclass=JSONRPCServerType):
|
||||||
await self.component_manager.stop()
|
await self.component_manager.stop()
|
||||||
else:
|
else:
|
||||||
self.component_startup_task.cancel()
|
self.component_startup_task.cancel()
|
||||||
|
await self.runner.shutdown()
|
||||||
await self.runner.cleanup()
|
await self.runner.cleanup()
|
||||||
if self.analytics_manager.is_started:
|
if self.analytics_manager.is_started:
|
||||||
self.analytics_manager.stop()
|
self.analytics_manager.stop()
|
||||||
|
|
|
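The one-line change above awaits `self.runner.shutdown()` before the existing `self.runner.cleanup()`. In aiohttp, `AppRunner.shutdown()` runs the application's shutdown step (the `on_shutdown` signal), so handlers that abort open connections get a chance to run before the runner tears its sites down. A minimal sketch of the same ordering with a bare `aiohttp.web.AppRunner` (the route and the sleep are placeholders, not taken from the daemon):

```python
import asyncio
from aiohttp import web


async def handle(request: web.Request) -> web.Response:
    return web.Response(text="ok")


async def main():
    app = web.Application()
    app.router.add_get("/", handle)

    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, "localhost", 8080)
    await site.start()

    await asyncio.sleep(1)  # stand-in for the daemon's lifetime

    # Same teardown order as the patched Daemon.stop():
    await runner.shutdown()  # run the app's on_shutdown handlers first
    await runner.cleanup()   # then stop the sites and free the sockets


asyncio.run(main())
```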
@@ -94,7 +94,7 @@ class ManagedStream:
         self.fully_reflected = asyncio.Event(loop=self.loop)
         self.file_output_task: typing.Optional[asyncio.Task] = None
         self.delayed_stop_task: typing.Optional[asyncio.Task] = None
-        self.streaming_responses: typing.List[StreamResponse] = []
+        self.streaming_responses: typing.List[typing.Tuple[Request, StreamResponse]] = []
         self.streaming = asyncio.Event(loop=self.loop)
         self._running = asyncio.Event(loop=self.loop)
         self.saving = asyncio.Event(loop=self.loop)
@@ -311,7 +311,7 @@ class ManagedStream:
                 headers=headers
             )
             await response.prepare(request)
-            self.streaming_responses.append(response)
+            self.streaming_responses.append((request, response))
             self.streaming.set()
             try:
                 wrote = 0
@@ -329,8 +329,9 @@ class ManagedStream:
             return response
         finally:
             response.force_close()
-            if response in self.streaming_responses:
-                self.streaming_responses.remove(response)
+            if (request, response) in self.streaming_responses:
+                self.streaming_responses.remove((request, response))
+            if not self.streaming_responses:
                 self.streaming.clear()

     async def _save_file(self, output_path: str):
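Taken together, the three `ManagedStream` hunks above change the bookkeeping from a list of responses to a list of `(request, response)` pairs: each pair is appended once the response is prepared, removed again in the handler's `finally` block, and the `streaming` event is now cleared only when no open responses remain. A rough sketch of that lifecycle in isolation, with a made-up `Tracker` class standing in for the attributes on `ManagedStream`:

```python
import asyncio
import typing

from aiohttp.web import Request, StreamResponse


class Tracker:
    """Stand-in for the per-response bookkeeping ManagedStream keeps."""

    def __init__(self):
        self.streaming_responses: typing.List[typing.Tuple[Request, StreamResponse]] = []
        self.streaming = asyncio.Event()

    async def stream(self, request: Request) -> StreamResponse:
        response = StreamResponse()
        await response.prepare(request)
        self.streaming_responses.append((request, response))
        self.streaming.set()
        try:
            await response.write(b"data")
            return response
        finally:
            response.force_close()
            if (request, response) in self.streaming_responses:
                self.streaming_responses.remove((request, response))
            if not self.streaming_responses:
                # only clear once the last open response is gone
                self.streaming.clear()
```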
@@ -394,7 +395,9 @@ class ManagedStream:
             self.file_output_task.cancel()
         self.file_output_task = None
         while self.streaming_responses:
-            self.streaming_responses.pop().force_close()
+            req, response = self.streaming_responses.pop()
+            response.force_close()
+            req.transport.close()
         self.downloader.stop()
         self._running.clear()
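This is the hunk that gives the commit its name: on stop, every tracked response is force-closed and the socket underneath it is closed as well. `StreamResponse.force_close()` only disables keep-alive, so an in-flight ranged download could otherwise linger until the client gave up; keeping the `Request` around (the reason for the tuple change above) makes `request.transport.close()` possible. A minimal sketch of that teardown, assuming the `(request, response)` pairs were collected as in the previous sketch:

```python
import typing

from aiohttp.web import Request, StreamResponse


def force_close_all(
    streaming_responses: typing.List[typing.Tuple[Request, StreamResponse]]
) -> None:
    """Drain the list, aborting every open streaming response."""
    while streaming_responses:
        req, response = streaming_responses.pop()
        response.force_close()          # never reuse this connection
        if req.transport is not None:   # transport may already be gone
            req.transport.close()       # drop the socket so the client stops waiting
```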