diff --git a/lbry/lbry/extras/daemon/Daemon.py b/lbry/lbry/extras/daemon/Daemon.py
index 701828a81..5552ba0a6 100644
--- a/lbry/lbry/extras/daemon/Daemon.py
+++ b/lbry/lbry/extras/daemon/Daemon.py
@@ -1769,7 +1769,7 @@ class Daemon(metaclass=JSONRPCServerType):
 
         Usage:
             file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
-                      [--rowid=<rowid>] [--added_at=<added_at>] [--claim_id=<claim_id>]
+                      [--rowid=<rowid>] [--added_on=<added_on>] [--claim_id=<claim_id>]
                       [--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>]
                       [--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>]
                       [--claim_name=<claim_name>] [--blobs_in_stream=<blobs_in_stream>]
@@ -1783,7 +1783,7 @@ class Daemon(metaclass=JSONRPCServerType):
                                                      downloads folder
             --stream_hash=<stream_hash>            : (str) get file with matching stream hash
             --rowid=<rowid>                        : (int) get file with matching row id
-            --added_at=<added_at>                  : (int) get file with matching time of insertion
+            --added_on=<added_on>                  : (int) get file with matching time of insertion
             --claim_id=<claim_id>                  : (str) get file with matching claim id
             --outpoint=<outpoint>                  : (str) get file with matching claim outpoint
             --txid=<txid>                          : (str) get file with matching claim txid
diff --git a/lbry/lbry/extras/daemon/json_response_encoder.py b/lbry/lbry/extras/daemon/json_response_encoder.py
index 318fe8084..e4e9077b7 100644
--- a/lbry/lbry/extras/daemon/json_response_encoder.py
+++ b/lbry/lbry/extras/daemon/json_response_encoder.py
@@ -273,7 +273,7 @@ class JSONResponseEncoder(JSONEncoder):
             'channel_name': managed_stream.channel_name,
             'claim_name': managed_stream.claim_name,
             'content_fee': managed_stream.content_fee,
-            'added_at': managed_stream.added_at,
+            'added_on': managed_stream.added_on,
             'height': tx_height,
             'confirmations': (best_height + 1) - tx_height if tx_height > 0 else tx_height,
             'timestamp': self.ledger.headers[tx_height]['timestamp'] if 0 < tx_height <= best_height else None
diff --git a/lbry/lbry/extras/daemon/migrator/migrate11to12.py b/lbry/lbry/extras/daemon/migrator/migrate11to12.py
index 3d2bda5d2..fa57e2119 100644
--- a/lbry/lbry/extras/daemon/migrator/migrate11to12.py
+++ b/lbry/lbry/extras/daemon/migrator/migrate11to12.py
@@ -13,7 +13,7 @@ def do_migration(conf):
     current_columns = []
     for col_info in cursor.execute("pragma table_info('file');").fetchall():
         current_columns.append(col_info[1])
-    if 'added_at' in current_columns:
+    if 'added_on' in current_columns:
         connection.close()
         print('already migrated')
         return
@@ -32,7 +32,7 @@ def do_migration(conf):
             status text not null,
             saved_file integer not null,
             content_fee text,
-            added_at integer not null
+            added_on integer not null
         );
     """)
 
@@ -42,10 +42,10 @@ def do_migration(conf):
     select = "select * from file"
     for (stream_hash, file_name, download_dir, data_rate, status, saved_file, fee) \
             in cursor.execute(select).fetchall():
-        added_at = int(time.time())
+        added_on = int(time.time())
         cursor.execute(
             "insert into new_file values (?, ?, ?, ?, ?, ?, ?, ?)",
-            (stream_hash, file_name, download_dir, data_rate, status, saved_file, fee, added_at)
+            (stream_hash, file_name, download_dir, data_rate, status, saved_file, fee, added_on)
         )
 
     # step 6: drop old table
diff --git a/lbry/lbry/extras/daemon/storage.py b/lbry/lbry/extras/daemon/storage.py
index b580a3502..d0c516a32 100644
--- a/lbry/lbry/extras/daemon/storage.py
+++ b/lbry/lbry/extras/daemon/storage.py
@@ -103,7 +103,7 @@ def get_all_lbry_files(transaction: sqlite3.Connection) -> typing.List[typing.Dict]:
     stream_hashes = tuple(
         stream_hash for (stream_hash,) in transaction.execute("select stream_hash from file").fetchall()
     )
-    for (rowid, stream_hash, file_name, download_dir, data_rate, status, saved_file, raw_content_fee, added_at,
+    for (rowid, stream_hash, file_name, download_dir, data_rate, status, saved_file, raw_content_fee, added_on,
          _, sd_hash, stream_key, stream_name, suggested_file_name, *claim_args) in _batched_select(
             transaction, "select file.rowid, file.*, stream.*, c.* "
                          "from file inner join stream on file.stream_hash=stream.stream_hash "
@@ -119,7 +119,7 @@ def get_all_lbry_files(transaction: sqlite3.Connection) -> typing.List[typing.Dict]:
         files.append(
             {
                 "rowid": rowid,
-                "added_at": added_at,
+                "added_on": added_on,
                 "stream_hash": stream_hash,
                 "file_name": file_name,  # hex
                 "download_directory": download_dir,  # hex
@@ -181,13 +181,13 @@ def delete_stream(transaction: sqlite3.Connection, descriptor: 'StreamDescriptor'
 
 def store_file(transaction: sqlite3.Connection, stream_hash: str, file_name: typing.Optional[str],
                download_directory: typing.Optional[str], data_payment_rate: float, status: str,
-               content_fee: typing.Optional[Transaction], added_at: typing.Optional[int] = None) -> int:
+               content_fee: typing.Optional[Transaction], added_on: typing.Optional[int] = None) -> int:
     if not file_name and not download_directory:
         encoded_file_name, encoded_download_dir = None, None
     else:
         encoded_file_name = binascii.hexlify(file_name.encode()).decode()
         encoded_download_dir = binascii.hexlify(download_directory.encode()).decode()
-    time_added = added_at or int(time.time())
+    time_added = added_on or int(time.time())
     transaction.execute(
         "insert or replace into file values (?, ?, ?, ?, ?, ?, ?, ?)",
         (stream_hash, encoded_file_name, encoded_download_dir, data_payment_rate, status,
@@ -251,7 +251,7 @@ class SQLiteStorage(SQLiteMixin):
             status text not null,
             saved_file integer not null,
             content_fee text,
-            added_at integer not null
+            added_on integer not null
         );
 
         create table if not exists content_claim (
@@ -454,19 +454,19 @@ class SQLiteStorage(SQLiteMixin):
 
     def save_downloaded_file(self, stream_hash: str, file_name: typing.Optional[str],
                              download_directory: typing.Optional[str], data_payment_rate: float,
                              content_fee: typing.Optional[Transaction] = None,
-                             added_at: typing.Optional[int] = None) -> typing.Awaitable[int]:
+                             added_on: typing.Optional[int] = None) -> typing.Awaitable[int]:
         return self.save_published_file(
             stream_hash, file_name, download_directory, data_payment_rate, status="running",
-            content_fee=content_fee, added_at=added_at
+            content_fee=content_fee, added_on=added_on
         )
 
     def save_published_file(self, stream_hash: str, file_name: typing.Optional[str],
                             download_directory: typing.Optional[str], data_payment_rate: float,
                             status: str = "finished", content_fee: typing.Optional[Transaction] = None,
-                            added_at: typing.Optional[int] = None) -> typing.Awaitable[int]:
+                            added_on: typing.Optional[int] = None) -> typing.Awaitable[int]:
         return self.db.run(store_file, stream_hash, file_name, download_directory, data_payment_rate, status,
-                           content_fee, added_at)
+                           content_fee, added_on)
 
     async def update_manually_removed_files_since_last_run(self):
         """
diff --git a/lbry/lbry/stream/managed_stream.py b/lbry/lbry/stream/managed_stream.py
index 4488f5296..0695bd5e1 100644
--- a/lbry/lbry/stream/managed_stream.py
+++ b/lbry/lbry/stream/managed_stream.py
@@ -55,7 +55,7 @@ class ManagedStream:
         'sd_hash',
         'download_directory',
         '_file_name',
-        '_added_at',
+        '_added_on',
         '_status',
         'stream_claim_info',
         'download_id',
@@ -82,7 +82,7 @@ class ManagedStream:
                  descriptor: typing.Optional[StreamDescriptor] = None,
                  content_fee: typing.Optional['Transaction'] = None,
                  analytics_manager: typing.Optional['AnalyticsManager'] = None,
-                 added_at: typing.Optional[int] = None):
+                 added_on: typing.Optional[int] = None):
         self.loop = loop
         self.config = config
         self.blob_manager = blob_manager
@@ -94,7 +94,7 @@ class ManagedStream:
         self.download_id = download_id or binascii.hexlify(generate_id()).decode()
         self.rowid = rowid
         self.content_fee = content_fee
-        self._added_at = added_at
+        self._added_on = added_on
         self.downloader = StreamDownloader(self.loop, self.config, self.blob_manager, sd_hash, descriptor)
         self.analytics_manager = analytics_manager
 
@@ -122,8 +122,8 @@ class ManagedStream:
         return self._file_name or (self.descriptor.suggested_file_name if self.descriptor else None)
 
     @property
-    def added_at(self) -> typing.Optional[int]:
-        return self._added_at
+    def added_on(self) -> typing.Optional[int]:
+        return self._added_on
 
     @property
     def status(self) -> str:
@@ -261,9 +261,9 @@ class ManagedStream:
             file_name, download_dir = self._file_name, self.download_directory
         else:
             file_name, download_dir = None, None
-        self._added_at = int(time.time())
+        self._added_on = int(time.time())
         self.rowid = await self.blob_manager.storage.save_downloaded_file(
-            self.stream_hash, file_name, download_dir, 0.0, added_at=self._added_at
+            self.stream_hash, file_name, download_dir, 0.0, added_on=self._added_on
         )
         if self.status != self.STATUS_RUNNING:
             await self.update_status(self.STATUS_RUNNING)
diff --git a/lbry/lbry/stream/stream_manager.py b/lbry/lbry/stream/stream_manager.py
index 442a9e7c8..637a57129 100644
--- a/lbry/lbry/stream/stream_manager.py
+++ b/lbry/lbry/stream/stream_manager.py
@@ -31,7 +31,7 @@ filter_fields = [
     'rowid',
     'status',
     'file_name',
-    'added_at',
+    'added_on',
     'sd_hash',
     'stream_hash',
     'claim_name',
@@ -117,7 +117,7 @@ class StreamManager:
     async def add_stream(self, rowid: int, sd_hash: str, file_name: typing.Optional[str],
                          download_directory: typing.Optional[str], status: str,
                          claim: typing.Optional['StoredStreamClaim'], content_fee: typing.Optional['Transaction'],
-                         added_at: typing.Optional[int]):
+                         added_on: typing.Optional[int]):
         try:
             descriptor = await self.blob_manager.get_stream_descriptor(sd_hash)
         except InvalidStreamDescriptorError as err:
@@ -126,7 +126,7 @@ class StreamManager:
         stream = ManagedStream(
             self.loop, self.config, self.blob_manager, descriptor.sd_hash, download_directory, file_name, status,
             claim, content_fee=content_fee, rowid=rowid, descriptor=descriptor,
-            analytics_manager=self.analytics_manager, added_at=added_at
+            analytics_manager=self.analytics_manager, added_on=added_on
         )
         self.streams[sd_hash] = stream
         self.storage.content_claim_callbacks[stream.stream_hash] = lambda: self._update_content_claim(stream)
@@ -159,7 +159,7 @@ class StreamManager:
                 file_info['rowid'], file_info['sd_hash'], file_name, download_directory,
                 file_info['status'],
                 file_info['claim'], file_info['content_fee'],
-                file_info['added_at']
+                file_info['added_on']
             )))
         if add_stream_tasks:
            await asyncio.gather(*add_stream_tasks, loop=self.loop)
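
Note on migrate11to12.py: the migration uses SQLite's copy-and-swap procedure for changing a table's columns (create a replacement table, copy rows across, drop the original, rename), which is required because `alter table ... rename column` is only available from SQLite 3.25 onward. Below is a minimal, self-contained sketch of that pattern, not the daemon's actual migrator: it runs against a throwaway in-memory database and abbreviates the `file` table down to two columns plus the new `added_on`.

import sqlite3
import time

# Throwaway in-memory database; the real migrator opens lbrynet.sqlite on disk.
connection = sqlite3.connect(":memory:")
cursor = connection.cursor()

# Abbreviated stand-in for the pre-migration `file` table (no timestamp column).
cursor.execute("create table file (stream_hash text primary key, status text not null)")
cursor.execute("insert into file values ('deadbeef', 'finished')")

# Same idempotence check the migration performs: bail out if `added_on` already exists.
current_columns = [col[1] for col in cursor.execute("pragma table_info('file');").fetchall()]
if 'added_on' not in current_columns:
    # create the replacement table carrying the new column
    cursor.execute("""
        create table new_file (
            stream_hash text not null primary key,
            status text not null,
            added_on integer not null
        );
    """)
    # copy every row across, stamping it with the current time
    for (stream_hash, status) in cursor.execute("select * from file").fetchall():
        cursor.execute("insert into new_file values (?, ?, ?)",
                       (stream_hash, status, int(time.time())))
    # drop the old table and move the replacement into its place
    cursor.execute("drop table file")
    cursor.execute("alter table new_file rename to file")
    connection.commit()

print([col[1] for col in cursor.execute("pragma table_info('file');").fetchall()])
# -> ['stream_hash', 'status', 'added_on']
connection.close()

Backfilling with `int(time.time())` mirrors the diff's own choice: files downloaded before the migration carry no insertion timestamp, so the migration time is the best available stand-in.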