From 582f79ba1c8e74d703f5d96936a641f17ec1d30a Mon Sep 17 00:00:00 2001
From: Victor Shyba
Date: Wed, 8 Jun 2022 11:08:42 -0300
Subject: [PATCH] do not consider pending blobs on disk space query

---
 lbry/extras/daemon/storage.py                       | 8 +++++---
 tests/integration/datanetwork/test_file_commands.py | 7 ++++++-
 2 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/lbry/extras/daemon/storage.py b/lbry/extras/daemon/storage.py
index eaac3301e..81b4263dc 100644
--- a/lbry/extras/daemon/storage.py
+++ b/lbry/extras/daemon/storage.py
@@ -449,7 +449,7 @@ class SQLiteStorage(SQLiteMixin):
         return await self.db.execute_fetchall(
             "select blob.blob_hash, blob.blob_length, blob.added_on "
             "from blob left join stream_blob using (blob_hash) "
-            "where stream_blob.stream_hash is null and blob.is_mine=? "
+            "where stream_blob.stream_hash is null and blob.is_mine=? and blob.status='finished' "
             "order by blob.blob_length desc, blob.added_on asc",
             (is_mine,)
         )
@@ -463,7 +463,8 @@ class SQLiteStorage(SQLiteMixin):
         content_blobs = await self.db.execute_fetchall(
             "select blob.blob_hash, blob.blob_length, blob.added_on "
             "from blob join stream_blob using (blob_hash) cross join stream using (stream_hash)"
-            "cross join file using (stream_hash) where blob.is_mine=? order by blob.added_on asc, blob.blob_length asc",
+            "cross join file using (stream_hash) "
+            "where blob.is_mine=? and blob.status='finished' order by blob.added_on asc, blob.blob_length asc",
             (is_mine,)
         )
         return content_blobs + sd_blobs
@@ -480,7 +481,8 @@ class SQLiteStorage(SQLiteMixin):
                 coalesce(sum(case when
                     is_mine=1
                 then blob_length else 0 end), 0) as private_storage
-            from blob left join stream_blob using (blob_hash) where blob_hash not in (select sd_hash from stream)
+            from blob left join stream_blob using (blob_hash)
+            where blob_hash not in (select sd_hash from stream) and blob.status='finished'
         """)
         return {
             'network_storage': network_size,
diff --git a/tests/integration/datanetwork/test_file_commands.py b/tests/integration/datanetwork/test_file_commands.py
index ffde6acc9..0e1ac141e 100644
--- a/tests/integration/datanetwork/test_file_commands.py
+++ b/tests/integration/datanetwork/test_file_commands.py
@@ -599,13 +599,18 @@ class DiskSpaceManagement(CommandTestCase):
         self.assertTrue(blobs2.issubset(blobs))
         self.assertFalse(blobs3.issubset(blobs))
         self.assertTrue(blobs4.issubset(blobs))
+        # check that pending blobs are not accounted (#3617)
+        await self.daemon.storage.db.execute_fetchall("update blob set status='pending'")
+        await self.blob_clean()  # just to refresh caches, has no effect
+        self.assertEqual(0, (await self.status())['disk_space']['total_used_mb'])
+        self.assertEqual(0, (await self.status())['disk_space']['content_blobs_storage_used_mb'])
+        self.assertEqual(0, (await self.status())['disk_space']['published_blobs_storage_used_mb'])
         # check that added_on gets set on downloads (was a bug)
         self.assertLess(0, await self.daemon.storage.run_and_return_one_or_none("select min(added_on) from blob"))
         await self.daemon.jsonrpc_file_delete(delete_all=True)
         await self.daemon.jsonrpc_get("foo4", save_file=False)
         self.assertLess(0, await self.daemon.storage.run_and_return_one_or_none("select min(added_on) from blob"))
-

 class TestBackgroundDownloaderComponent(CommandTestCase):
     async def get_blobs_from_sd_blob(self, sd_blob):
         descriptor = await StreamDescriptor.from_stream_descriptor_blob(