forked from LBRYCommunity/lbry-sdk
Compare commits: torrent_st...master (37 commits)
Commits:
eb5da9511e, 8722ef840e, 6e75a1a89b, ef3189de1d, c2d2080034, d0b5a0a8fd,
1d0e17be21, 4ef03bb1f4, 4bd4bcdc27, e5ca967fa2, eed7d02e8b, 02aecad52b,
585962d930, ea4fba39a6, 7a86406746, c8a3eb97a4, 20213628d7, 2d1649f972,
5cb04b86a0, 93ab6b3be3, b9762c3e64, 82592d00ef, c118174c1a, d284acd8b8,
235c98372d, d2f5073ef4, 84e5e43117, 7bd025ae54, 8f28ce65b0, d36e305129,
2609dee8fb, a2da86d4b5, aa16c7fee5, 3266f72b82, 77cd2a3f8a, 308e586e9a,
84beddfd77
28 changed files with 118 additions and 114 deletions
.github/workflows/main.yml (54 changes, vendored)

@@ -1,5 +1,5 @@
 name: ci
-on: ["push", "pull_request"]
+on: ["push", "pull_request", "workflow_dispatch"]

 jobs:

@@ -7,12 +7,12 @@ jobs:
     name: lint
     runs-on: ubuntu-20.04
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v1
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
         with:
-          python-version: '3.7'
+          python-version: '3.9'
       - name: extract pip cache
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         with:
           path: ~/.cache/pip
           key: ${{ runner.os }}-pip-${{ hashFiles('setup.py') }}
@@ -31,21 +31,21 @@ jobs:
           - windows-latest
     runs-on: ${{ matrix.os }}
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v1
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
         with:
-          python-version: '3.7'
+          python-version: '3.9'
       - name: set pip cache dir
-        id: pip-cache
-        run: echo "::set-output name=dir::$(pip cache dir)"
+        shell: bash
+        run: echo "PIP_CACHE_DIR=$(pip cache dir)" >> $GITHUB_ENV
       - name: extract pip cache
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         with:
-          path: ${{ steps.pip-cache.outputs.dir }}
+          path: ${{ env.PIP_CACHE_DIR }}
           key: ${{ runner.os }}-pip-${{ hashFiles('setup.py') }}
           restore-keys: ${{ runner.os }}-pip-
       - id: os-name
-        uses: ASzc/change-string-case-action@v1
+        uses: ASzc/change-string-case-action@v5
         with:
           string: ${{ runner.os }}
       - run: python -m pip install --user --upgrade pip wheel
@@ -93,16 +93,16 @@ jobs:
         uses: elastic/elastic-github-actions/elasticsearch@master
         with:
           stack-version: 7.12.1
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v1
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
         with:
-          python-version: '3.7'
+          python-version: '3.9'
       - if: matrix.test == 'other'
         run: |
           sudo apt-get update
           sudo apt-get install -y --no-install-recommends ffmpeg
       - name: extract pip cache
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         with:
           path: ./.tox
           key: tox-integration-${{ matrix.test }}-${{ hashFiles('setup.py') }}
@@ -138,26 +138,26 @@ jobs:
     strategy:
       matrix:
         os:
-          - ubuntu-18.04
+          - ubuntu-20.04
           - macos-latest
           - windows-latest
     runs-on: ${{ matrix.os }}
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v1
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
         with:
-          python-version: '3.7'
+          python-version: '3.9'
       - id: os-name
-        uses: ASzc/change-string-case-action@v1
+        uses: ASzc/change-string-case-action@v5
         with:
          string: ${{ runner.os }}
      - name: set pip cache dir
-        id: pip-cache
-        run: echo "::set-output name=dir::$(pip cache dir)"
+        shell: bash
+        run: echo "PIP_CACHE_DIR=$(pip cache dir)" >> $GITHUB_ENV
       - name: extract pip cache
-        uses: actions/cache@v2
+        uses: actions/cache@v3
         with:
-          path: ${{ steps.pip-cache.outputs.dir }}
+          path: ${{ env.PIP_CACHE_DIR }}
           key: ${{ runner.os }}-pip-${{ hashFiles('setup.py') }}
           restore-keys: ${{ runner.os }}-pip-
       - run: pip install pyinstaller==4.6
@@ -175,7 +175,7 @@ jobs:
           pip install pywin32==301
           pyinstaller --additional-hooks-dir=scripts/. --icon=icons/lbry256.ico --onefile --name lbrynet lbry/extras/cli.py
           dist/lbrynet.exe --version
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3
         with:
           name: lbrynet-${{ steps.os-name.outputs.lowercase }}
           path: dist/

@@ -1,2 +1,2 @@
-__version__ = "0.112.0"
+__version__ = "0.113.0"
 version = tuple(map(int, __version__.split('.')))  # pylint: disable=invalid-name

@@ -64,7 +64,7 @@ class BlobDownloader:
         self.scores[peer] = bytes_received / elapsed if bytes_received and elapsed else 1

     async def new_peer_or_finished(self):
-        active_tasks = list(self.active_connections.values()) + [asyncio.sleep(1)]
+        active_tasks = list(self.active_connections.values()) + [asyncio.create_task(asyncio.sleep(1))]
        await asyncio.wait(active_tasks, return_when='FIRST_COMPLETED')

     def cleanup_active(self):
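
Note: most of the asyncio changes in these commits follow the pattern above. asyncio.wait() accepted bare coroutines on Python 3.7, but that was deprecated in 3.8 and became an error in 3.11, so coroutines are now wrapped in asyncio.create_task() first. A minimal sketch of the pattern (illustrative names, not repo code):

    import asyncio

    async def job(delay):
        await asyncio.sleep(delay)
        return delay

    async def main():
        # asyncio.wait([job(1), job(2)]) worked on 3.7 but fails on 3.11+;
        # wrapping each coroutine in a Task is the portable form.
        tasks = [asyncio.create_task(job(d)) for d in (1, 2)]
        done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
        for task in pending:
            task.cancel()

    asyncio.run(main())
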
lbry/conf.py (11 changes)

@@ -688,6 +688,9 @@ class Config(CLIConfig):
     tracker_servers = Servers("BitTorrent-compatible (BEP15) UDP trackers for helping P2P discovery", [
         ('tracker.lbry.com', 9252),
         ('tracker.lbry.grin.io', 9252),
+        ('tracker.lbry.pigg.es', 9252),
+        ('tracker.lizard.technology', 9252),
+        ('s1.lbry.network', 9252),
     ])

     lbryum_servers = Servers("SPV wallet servers", [
@@ -700,14 +703,20 @@ class Config(CLIConfig):
         ('spv17.lbry.com', 50001),
         ('spv18.lbry.com', 50001),
         ('spv19.lbry.com', 50001),
+        ('hub.lbry.grin.io', 50001),
+        ('hub.lizard.technology', 50001),
+        ('s1.lbry.network', 50001),
     ])
     known_dht_nodes = Servers("Known nodes for bootstrapping connection to the DHT", [
         ('dht.lbry.grin.io', 4444),  # Grin
         ('dht.lbry.madiator.com', 4444),  # Madiator
+        ('dht.lbry.pigg.es', 4444),  # Pigges
         ('lbrynet1.lbry.com', 4444),  # US EAST
         ('lbrynet2.lbry.com', 4444),  # US WEST
         ('lbrynet3.lbry.com', 4444),  # EU
-        ('lbrynet4.lbry.com', 4444)  # ASIA
+        ('lbrynet4.lbry.com', 4444),  # ASIA
+        ('dht.lizard.technology', 4444),  # Jack
+        ('s2.lbry.network', 4444),
     ])

     # blockchain

@@ -42,8 +42,6 @@ class BlobAnnouncer:
                 log.debug("failed to announce %s, could only find %d peers, retrying soon.", blob_hash[:8], peers)
         except Exception as err:
             self.announcements_sent_metric.labels(peers=0, error=True).inc()
-            if isinstance(err, asyncio.CancelledError):  # TODO: remove when updated to 3.8
-                raise err
             log.warning("error announcing %s: %s", blob_hash[:8], str(err))

     async def _announce(self, batch_size: typing.Optional[int] = 10):
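
Note: the guard deleted here (and in several hunks below) was needed on Python 3.7, where asyncio.CancelledError still subclassed Exception and had to be re-raised by hand so cancellation was not swallowed. Since Python 3.8 it subclasses BaseException, so a bare `except Exception` no longer catches it and the guard is dead code. A small demonstration (assumes Python 3.8+):

    import asyncio

    async def worker():
        try:
            await asyncio.sleep(60)
        except Exception:
            # Not reached on cancellation under Python 3.8+:
            # CancelledError derives from BaseException, not Exception.
            print("swallowed cancellation")

    async def main():
        task = asyncio.create_task(worker())
        await asyncio.sleep(0)
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            print("cancellation propagated")

    asyncio.run(main())
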
@@ -37,7 +37,7 @@ class Component(metaclass=ComponentType):
     def running(self):
         return self._running

-    async def get_status(self):
+    async def get_status(self):  # pylint: disable=no-self-use
         return

     async def start(self):
@@ -118,7 +118,7 @@ class ComponentManager:
                 component._setup() for component in stage if not component.running
             ]
             if needing_start:
-                await asyncio.wait(needing_start)
+                await asyncio.wait(map(asyncio.create_task, needing_start))
         self.started.set()

     async def stop(self):
@@ -131,7 +131,7 @@ class ComponentManager:
                 component._stop() for component in stage if component.running
             ]
             if needing_stop:
-                await asyncio.wait(needing_stop)
+                await asyncio.wait(map(asyncio.create_task, needing_stop))

     def all_components_running(self, *component_names):
         """
@@ -374,7 +374,7 @@ class FileManagerComponent(Component):
         log.info('Done setting up file manager')

     async def stop(self):
-        self.file_manager.stop()
+        await self.file_manager.stop()


 class BackgroundDownloaderComponent(Component):
@@ -560,8 +560,6 @@ class UPnPComponent(Component):
             self.upnp = await UPnP.discover(loop=self.component_manager.loop)
             log.info("found upnp gateway: %s", self.upnp.gateway.manufacturer_string)
         except Exception as err:
-            if isinstance(err, asyncio.CancelledError):  # TODO: remove when updated to 3.8
-                raise
             log.warning("upnp discovery failed: %s", err)
             self.upnp = None
@@ -614,7 +614,8 @@ class Daemon(metaclass=JSONRPCServerType):
             content_type='application/json'
         )

-    async def handle_metrics_get_request(self, request: web.Request):
+    @staticmethod
+    async def handle_metrics_get_request(request: web.Request):
         try:
             return web.Response(
                 text=prom_generate_latest().decode(),
@@ -80,8 +80,6 @@ class MarketFeed:
             self.rate = ExchangeRate(self.market, rate, int(time.time()))
             self.last_check = time.time()
             return self.rate
-        except asyncio.CancelledError:
-            raise
         except asyncio.TimeoutError:
             log.warning("Timed out fetching exchange rate from %s.", self.name)
         except json.JSONDecodeError as e:
@@ -793,7 +793,7 @@ class SQLiteStorage(SQLiteMixin):

         await self.db.run(_save_claims)
         if update_file_callbacks:
-            await asyncio.wait(update_file_callbacks)
+            await asyncio.wait(map(asyncio.create_task, update_file_callbacks))
         if claim_id_to_supports:
             await self.save_supports(claim_id_to_supports)
@@ -50,10 +50,10 @@ class FileManager:
             await manager.started.wait()
         self.started.set()

-    def stop(self):
+    async def stop(self):
         for manager in self.source_managers.values():
             # fixme: pop or not?
-            manager.stop()
+            await manager.stop()
         self.started.clear()

     @cache_concurrent
@@ -99,8 +99,6 @@ class FileManager:
         except asyncio.TimeoutError:
             raise ResolveTimeoutError(uri)
         except Exception as err:
-            if isinstance(err, asyncio.CancelledError):
-                raise
             log.exception("Unexpected error resolving stream:")
             raise ResolveError(f"Unexpected error resolving stream: {str(err)}")
         if 'error' in resolved_result:
@@ -249,7 +247,7 @@ class FileManager:
         except asyncio.TimeoutError:
             error = DownloadDataTimeoutError(stream.sd_hash)
             raise error
-        except Exception as err:  # forgive data timeout, don't delete stream
+        except (Exception, asyncio.CancelledError) as err:  # forgive data timeout, don't delete stream
             expected = (DownloadSDTimeoutError, DownloadDataTimeoutError, InsufficientFundsError,
                         KeyFeeAboveMaxAllowedError, ResolveError, InvalidStreamURLError)
             if isinstance(err, expected):
@@ -67,7 +67,7 @@ class ManagedDownloadSource:
     async def save_file(self, file_name: Optional[str] = None, download_directory: Optional[str] = None):
         raise NotImplementedError()

-    def stop_tasks(self):
+    async def stop_tasks(self):
         raise NotImplementedError()

     def set_claim(self, claim_info: typing.Dict, claim: 'Claim'):
@@ -59,11 +59,11 @@ class SourceManager:
     def add(self, source: ManagedDownloadSource):
         self._sources[source.identifier] = source

-    def remove(self, source: ManagedDownloadSource):
+    async def remove(self, source: ManagedDownloadSource):
         if source.identifier not in self._sources:
             return
         self._sources.pop(source.identifier)
-        source.stop_tasks()
+        await source.stop_tasks()

     async def initialize_from_database(self):
         raise NotImplementedError()
@@ -72,10 +72,10 @@ class SourceManager:
         await self.initialize_from_database()
         self.started.set()

-    def stop(self):
+    async def stop(self):
         while self._sources:
             _, source = self._sources.popitem()
-            source.stop_tasks()
+            await source.stop_tasks()
         self.started.clear()

     async def create(self, file_path: str, key: Optional[bytes] = None,
@@ -83,7 +83,7 @@ class SourceManager:
         raise NotImplementedError()

     async def delete(self, source: ManagedDownloadSource, delete_file: Optional[bool] = False):
-        self.remove(source)
+        await self.remove(source)
         if delete_file and source.output_file_exists:
             os.remove(source.full_path)
@@ -23,6 +23,7 @@ class BackgroundDownloader:
         except ValueError:
             return
         except asyncio.CancelledError:
+            log.debug("Cancelled background downloader")
             raise
         except Exception:
             log.error("Unexpected download error on background downloader")
@@ -191,7 +191,7 @@ class ManagedStream(ManagedDownloadSource):
         Stop any running save/stream tasks as well as the downloader and update the status in the database
         """

-        self.stop_tasks()
+        await self.stop_tasks()
         if (finished and self.status != self.STATUS_FINISHED) or self.status == self.STATUS_RUNNING:
             await self.update_status(self.STATUS_FINISHED if finished else self.STATUS_STOPPED)

@@ -279,7 +279,7 @@ class ManagedStream(ManagedDownloadSource):
             log.info("finished saving file for lbry://%s#%s (sd hash %s...) -> %s", self.claim_name, self.claim_id,
                      self.sd_hash[:6], self.full_path)
             await self.blob_manager.storage.set_saved_file(self.stream_hash)
-        except Exception as err:
+        except (Exception, asyncio.CancelledError) as err:
             if os.path.isfile(output_path):
                 log.warning("removing incomplete download %s for %s", output_path, self.sd_hash)
                 os.remove(output_path)
@@ -324,12 +324,13 @@ class ManagedStream(ManagedDownloadSource):
             await asyncio.wait_for(self.started_writing.wait(), self.config.download_timeout)
         except asyncio.TimeoutError:
             log.warning("timeout starting to write data for lbry://%s#%s", self.claim_name, self.claim_id)
-            self.stop_tasks()
+            await self.stop_tasks()
             await self.update_status(ManagedStream.STATUS_STOPPED)

-    def stop_tasks(self):
+    async def stop_tasks(self):
         if self.file_output_task and not self.file_output_task.done():
             self.file_output_task.cancel()
+            await asyncio.gather(self.file_output_task, return_exceptions=True)
         self.file_output_task = None
         while self.streaming_responses:
             req, response = self.streaming_responses.pop()
@@ -366,7 +367,7 @@ class ManagedStream(ManagedDownloadSource):
             return sent
         except ConnectionError:
             return sent
-        except (OSError, Exception) as err:
+        except (OSError, Exception, asyncio.CancelledError) as err:
             if isinstance(err, asyncio.CancelledError):
                 log.warning("stopped uploading %s#%s to reflector", self.claim_name, self.claim_id)
             elif isinstance(err, OSError):
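
Note: stop_tasks() turning into a coroutine here is what forces stop() to become async all the way up through SourceManager, FileManager, and the daemon components. After cancelling file_output_task it now waits for the task to actually unwind before dropping the reference, and gather(..., return_exceptions=True) absorbs the CancelledError instead of re-raising it. A sketch of that cancel-then-await idiom (illustrative class, not repo code):

    import asyncio

    class Worker:
        def __init__(self):
            self.task = None

        async def start(self):
            self.task = asyncio.create_task(asyncio.sleep(3600))

        async def stop(self):
            if self.task and not self.task.done():
                self.task.cancel()
                # Wait for the cancellation to finish; return_exceptions=True
                # turns the CancelledError into a return value.
                await asyncio.gather(self.task, return_exceptions=True)
            self.task = None

    async def main():
        worker = Worker()
        await worker.start()
        await worker.stop()  # returns only after the task has unwound

    asyncio.run(main())
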
@@ -164,8 +164,6 @@ class StreamManager(SourceManager):
     async def reflect_streams(self):
         try:
             return await self._reflect_streams()
-        except asyncio.CancelledError:
-            raise
         except Exception:
             log.exception("reflector task encountered an unexpected error!")

@@ -198,8 +196,8 @@ class StreamManager(SourceManager):
         await super().start()
         self.re_reflect_task = self.loop.create_task(self.reflect_streams())

-    def stop(self):
-        super().stop()
+    async def stop(self):
+        await super().stop()
         if self.resume_saving_task and not self.resume_saving_task.done():
             self.resume_saving_task.cancel()
         if self.re_reflect_task and not self.re_reflect_task.done():
@@ -226,7 +224,8 @@ class StreamManager(SourceManager):
         )
         return task

-    async def _retriable_reflect_stream(self, stream, host, port):
+    @staticmethod
+    async def _retriable_reflect_stream(stream, host, port):
         sent = await stream.upload_to_reflector(host, port)
         while not stream.is_fully_reflected and stream.reflector_progress > 0 and len(sent) > 0:
             stream.reflector_progress = 0
@@ -261,7 +260,7 @@ class StreamManager(SourceManager):
             return
         if source.identifier in self.running_reflector_uploads:
             self.running_reflector_uploads[source.identifier].cancel()
-        source.stop_tasks()
+        await source.stop_tasks()
         if source.identifier in self.streams:
             del self.streams[source.identifier]
         blob_hashes = [source.identifier] + [b.blob_hash for b in source.descriptor.blobs[:-1]]
@@ -74,7 +74,7 @@ class TorrentSource(ManagedDownloadSource):
     def bt_infohash(self):
         return self.identifier

-    def stop_tasks(self):
+    async def stop_tasks(self):
         pass

     @property
@@ -118,8 +118,8 @@ class TorrentManager(SourceManager):
     async def start(self):
         await super().start()

-    def stop(self):
-        super().stop()
+    async def stop(self):
+        await super().stop()
         log.info("finished stopping the torrent manager")

     async def delete(self, source: ManagedDownloadSource, delete_file: Optional[bool] = False):
@@ -141,7 +141,7 @@ class CoinSelector:
                           _) -> List[OutputEffectiveAmountEstimator]:
         """ Accumulate UTXOs at random until there is enough to cover the target. """
         target = self.target + self.cost_of_change
-        self.random.shuffle(txos, self.random.random)
+        self.random.shuffle(txos, random=self.random.random)  # pylint: disable=deprecated-argument
         selection = []
         amount = 0
         for coin in txos:
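
Note: the second argument to random.shuffle() (a replacement random() function) was deprecated in Python 3.9 and removed in 3.11; the hunk above switches to passing it by keyword and silences pylint's warning about it. Since the shuffle already runs on a seeded Random instance, the forward-compatible spelling would simply drop the argument, roughly:

    import random

    rng = random.Random(42)   # seeded, reproducible selection order
    txos = list(range(10))

    # Equivalent to rng.shuffle(txos, random=rng.random) on older Pythons:
    # a bound shuffle() already draws from the instance's own random().
    rng.shuffle(txos)
    print(txos)
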
@@ -329,10 +329,10 @@ class Ledger(metaclass=LedgerRegistry):
     async def start(self):
         if not os.path.exists(self.path):
             os.mkdir(self.path)
-        await asyncio.wait([
+        await asyncio.wait(map(asyncio.create_task, [
             self.db.open(),
             self.headers.open()
-        ])
+        ]))
         fully_synced = self.on_ready.first
         asyncio.create_task(self.network.start())
         await self.network.on_connected.first
@@ -466,9 +466,9 @@ class Ledger(metaclass=LedgerRegistry):
     async def subscribe_accounts(self):
         if self.network.is_connected and self.accounts:
             log.info("Subscribe to %i accounts", len(self.accounts))
-            await asyncio.wait([
+            await asyncio.wait(map(asyncio.create_task, [
                 self.subscribe_account(a) for a in self.accounts
-            ])
+            ]))

     async def subscribe_account(self, account: Account):
         for address_manager in account.address_managers.values():
@@ -938,9 +938,7 @@ class Ledger(metaclass=LedgerRegistry):
                 "%d change addresses (gap: %d), %d channels, %d certificates and %d claims. ",
                 account.id, balance, total_receiving, account.receiving.gap, total_change,
                 account.change.gap, channel_count, len(account.channel_keys), claim_count)
-        except Exception as err:
-            if isinstance(err, asyncio.CancelledError):  # TODO: remove when updated to 3.8
-                raise
+        except Exception:
             log.exception(
                 'Failed to display wallet state, please file issue '
                 'for this bug along with the traceback you see below:')
@@ -963,9 +961,7 @@ class Ledger(metaclass=LedgerRegistry):
         claim_ids = [p.purchased_claim_id for p in purchases]
         try:
             resolved, _, _, _ = await self.claim_search([], claim_ids=claim_ids)
-        except Exception as err:
-            if isinstance(err, asyncio.CancelledError):  # TODO: remove when updated to 3.8
-                raise
+        except Exception:
             log.exception("Resolve failed while looking up purchased claim ids:")
             resolved = []
         lookup = {claim.claim_id: claim for claim in resolved}
@@ -1045,9 +1041,7 @@ class Ledger(metaclass=LedgerRegistry):
         claim_ids = collection.claim.collection.claims.ids[offset:page_size + offset]
         try:
             resolve_results, _, _, _ = await self.claim_search([], claim_ids=claim_ids)
-        except Exception as err:
-            if isinstance(err, asyncio.CancelledError):  # TODO: remove when updated to 3.8
-                raise
+        except Exception:
             log.exception("Resolve failed while looking up collection claim ids:")
             return []
         claims = []
@@ -117,7 +117,7 @@ class ClientSession(BaseClientSession):
                     )
                 else:
                     await asyncio.sleep(max(0, max_idle - (now - self.last_send)))
-        except Exception as err:
+        except (Exception, asyncio.CancelledError) as err:
             if isinstance(err, asyncio.CancelledError):
                 log.info("closing connection to %s:%i", *self.server)
             else:
@@ -214,7 +214,7 @@ class Network:
         def loop_task_done_callback(f):
             try:
                 f.result()
-            except Exception:
+            except (Exception, asyncio.CancelledError):
                 if self.running:
                     log.exception("wallet server connection loop crashed")

@@ -312,7 +312,8 @@ class Network:
         sleep_delay = 30
         while self.running:
             await asyncio.wait(
-                [asyncio.sleep(30), self._urgent_need_reconnect.wait()], return_when=asyncio.FIRST_COMPLETED
+                map(asyncio.create_task, [asyncio.sleep(30), self._urgent_need_reconnect.wait()]),
+                return_when=asyncio.FIRST_COMPLETED
             )
             if self._urgent_need_reconnect.is_set():
                 sleep_delay = 30
@@ -338,7 +339,7 @@ class Network:
             try:
                 if not self._urgent_need_reconnect.is_set():
                     await asyncio.wait(
-                        [self._keepalive_task, self._urgent_need_reconnect.wait()],
+                        [self._keepalive_task, asyncio.create_task(self._urgent_need_reconnect.wait())],
                         return_when=asyncio.FIRST_COMPLETED
                     )
                 else:
@@ -264,8 +264,7 @@ class SPVNode:
             await self.server.start()
         except Exception as e:
             self.stopped = True
-            if not isinstance(e, asyncio.CancelledError):
-                log.exception("failed to start spv node")
+            log.exception("failed to start spv node")
             raise e

     async def stop(self, cleanup=True):
@@ -28,6 +28,7 @@ disable=
     no-else-return,
     cyclic-import,
     missing-docstring,
+    consider-using-f-string,
     duplicate-code,
     expression-not-assigned,
     inconsistent-return-statements,

setup.py (6 changes)

@@ -18,7 +18,7 @@ setup(
     long_description_content_type="text/markdown",
     keywords="lbry protocol media",
     license='MIT',
-    python_requires='>=3.7',
+    python_requires='>=3.8',
     packages=find_packages(exclude=('tests',)),
     zip_safe=False,
     entry_points={
@@ -36,7 +36,7 @@ setup(
         'distro==1.4.0',
         'base58==1.0.0',
         'cffi==1.13.2',
-        'cryptography==2.5',
+        'cryptography==3.4.7',
         'protobuf==3.17.2',
         'prometheus_client==0.7.1',
         'ecdsa==0.13.3',
@@ -50,7 +50,7 @@ setup(
     ],
     extras_require={
         'lint': [
-            'pylint==2.10.0'
+            'pylint==2.13.9'
         ],
         'test': [
             'coverage',

@@ -61,16 +61,14 @@ def mock_network_loop(loop: asyncio.AbstractEventLoop,
         dht_network[from_addr] = protocol
         return transport, protocol

-    with mock.patch('socket.socket') as mock_socket:
-        mock_sock = mock.Mock(spec=socket.socket)
-        mock_sock.setsockopt = lambda *_: None
-        mock_sock.bind = lambda *_: None
-        mock_sock.setblocking = lambda *_: None
-        mock_sock.getsockname = lambda: "0.0.0.0"
-        mock_sock.getpeername = lambda: ""
-        mock_sock.close = lambda: None
-        mock_sock.type = socket.SOCK_DGRAM
-        mock_sock.fileno = lambda: 7
-        mock_socket.return_value = mock_sock
-        loop.create_datagram_endpoint = create_datagram_endpoint
-        yield
+    mock_sock = mock.Mock(spec=socket.socket)
+    mock_sock.setsockopt = lambda *_: None
+    mock_sock.bind = lambda *_: None
+    mock_sock.setblocking = lambda *_: None
+    mock_sock.getsockname = lambda: "0.0.0.0"
+    mock_sock.getpeername = lambda: ""
+    mock_sock.close = lambda: None
+    mock_sock.type = socket.SOCK_DGRAM
+    mock_sock.fileno = lambda: 7
+    loop.create_datagram_endpoint = create_datagram_endpoint
+    yield
@@ -354,7 +354,7 @@ class FileCommands(CommandTestCase):
         await self.daemon.jsonrpc_get('lbry://foo')
         with open(original_path, 'wb') as handle:
             handle.write(b'some other stuff was there instead')
-        self.daemon.file_manager.stop()
+        await self.daemon.file_manager.stop()
         await self.daemon.file_manager.start()
         await asyncio.wait_for(self.wait_files_to_complete(), timeout=5)  # if this hangs, file didn't get set completed
         # check that internal state got through up to the file list API
@@ -382,8 +382,7 @@ class FileCommands(CommandTestCase):
         resp = await self.out(self.daemon.jsonrpc_get('lbry://foo', timeout=2))
         self.assertNotIn('error', resp)
         self.assertTrue(os.path.isfile(path))
-        self.daemon.file_manager.stop()
-        await asyncio.sleep(0.01)  # FIXME: this sleep should not be needed
+        await self.daemon.file_manager.stop()
         self.assertFalse(os.path.isfile(path))

     async def test_incomplete_downloads_retry(self):
@@ -478,7 +477,7 @@ class FileCommands(CommandTestCase):

         # restart the daemon and make sure the fee is still there

-        self.daemon.file_manager.stop()
+        await self.daemon.file_manager.stop()
         await self.daemon.file_manager.start()
         self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
         self.assertEqual((await self.daemon.jsonrpc_file_list())['items'][0].content_fee.raw, raw_content_fee)
@@ -3,7 +3,9 @@ import hashlib
 import aiohttp
 import aiohttp.web
 import asyncio
+import contextlib

+from lbry.file.source import ManagedDownloadSource
 from lbry.utils import aiohttp_request
 from lbry.blob.blob_file import MAX_BLOB_SIZE
 from lbry.testcase import CommandTestCase
@@ -21,7 +23,7 @@ def get_random_bytes(n: int) -> bytes:

 class RangeRequests(CommandTestCase):
     async def _restart_stream_manager(self):
-        self.daemon.file_manager.stop()
+        await self.daemon.file_manager.stop()
         await self.daemon.file_manager.start()
         return

@@ -352,14 +354,21 @@ class RangeRequests(CommandTestCase):
         path = stream.full_path
         self.assertIsNotNone(path)
         if wait_for_start_writing:
-            await stream.started_writing.wait()
+            with contextlib.suppress(asyncio.CancelledError):
+                await stream.started_writing.wait()
             self.assertTrue(os.path.isfile(path))
-        await self._restart_stream_manager()
+        await self.daemon.file_manager.stop()
+        # while stopped, we get no response to query and no file is present
+        self.assertEqual((await self.daemon.jsonrpc_file_list())['items'], [])
+        self.assertEqual(os.path.isfile(path), stream.status == ManagedDownloadSource.STATUS_FINISHED)
+        await self.daemon.file_manager.start()
+        # after restart, we get a response to query and same file path
         stream = (await self.daemon.jsonrpc_file_list())['items'][0]
         self.assertIsNotNone(stream.full_path)
-        self.assertFalse(os.path.isfile(path))
+        self.assertEqual(stream.full_path, path)
         if wait_for_start_writing:
-            await stream.started_writing.wait()
+            with contextlib.suppress(asyncio.CancelledError):
+                await stream.started_writing.wait()
             self.assertTrue(os.path.isfile(path))

     async def test_file_save_stop_before_finished_streaming_only_wait_for_start(self):
@@ -424,7 +424,7 @@ class TestStreamManager(BlobExchangeTestBase):
         self.assertIsNone(stream.full_path)
         self.assertEqual(0, stream.written_bytes)

-        self.stream_manager.stop()
+        await self.stream_manager.stop()
         await self.stream_manager.start()
         self.assertEqual(1, len(self.stream_manager.streams))
         stream = list(self.stream_manager.streams.values())[0]
@@ -449,7 +449,7 @@ class TestStreamManager(BlobExchangeTestBase):
         stream = await self.file_manager.download_from_uri(self.uri, self.exchange_rate_manager)
         await stream.finished_writing.wait()
         await asyncio.sleep(0)
-        self.stream_manager.stop()
+        await self.stream_manager.stop()
         self.client_blob_manager.stop()
         # partial removal, only sd blob is missing.
         # in this case, we recover the sd blob while the other blobs are kept untouched as 'finished'