renamed added_at to added_on, to be consistent with existing modified_on field

Lex Berezhny 2019-10-26 11:24:37 -04:00
parent 4237331222
commit dfc1e6a2b9
6 changed files with 27 additions and 27 deletions

@@ -1769,7 +1769,7 @@ class Daemon(metaclass=JSONRPCServerType):
Usage:
file_list [--sd_hash=<sd_hash>] [--file_name=<file_name>] [--stream_hash=<stream_hash>]
- [--rowid=<rowid>] [--added_at=<added_at>] [--claim_id=<claim_id>]
+ [--rowid=<rowid>] [--added_on=<added_on>] [--claim_id=<claim_id>]
[--outpoint=<outpoint>] [--txid=<txid>] [--nout=<nout>]
[--channel_claim_id=<channel_claim_id>] [--channel_name=<channel_name>]
[--claim_name=<claim_name>] [--blobs_in_stream=<blobs_in_stream>]
@@ -1783,7 +1783,7 @@ class Daemon(metaclass=JSONRPCServerType):
downloads folder
--stream_hash=<stream_hash> : (str) get file with matching stream hash
--rowid=<rowid> : (int) get file with matching row id
- --added_at=<added_at> : (int) get file with matching time of insertion
+ --added_on=<added_on> : (int) get file with matching time of insertion
--claim_id=<claim_id> : (str) get file with matching claim id
--outpoint=<outpoint> : (str) get file with matching claim outpoint
--txid=<txid> : (str) get file with matching claim txid

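For context on the renamed filter: a minimal client-side sketch, assuming the daemon's default JSON-RPC endpoint at localhost:5279 and the requests library (both assumptions; the timestamp value is illustrative):

```python
import requests  # assumed HTTP client; any JSON-RPC client works

# Query file_list for the file whose insertion time matches exactly,
# using the renamed `added_on` filter from the usage block above.
response = requests.post(
    "http://localhost:5279",
    json={
        "jsonrpc": "2.0",
        "method": "file_list",
        "params": {"added_on": 1572102277},  # exact match, per "matching time of insertion"
        "id": 1,
    },
)
print(response.json()["result"])
```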
@@ -273,7 +273,7 @@ class JSONResponseEncoder(JSONEncoder):
'channel_name': managed_stream.channel_name,
'claim_name': managed_stream.claim_name,
'content_fee': managed_stream.content_fee,
- 'added_at': managed_stream.added_at,
+ 'added_on': managed_stream.added_on,
'height': tx_height,
'confirmations': (best_height + 1) - tx_height if tx_height > 0 else tx_height,
'timestamp': self.ledger.headers[tx_height]['timestamp'] if 0 < tx_height <= best_height else None

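Since the encoder now emits the timestamp under added_on, a client reading the old key gets nothing back. A small, hypothetical compatibility accessor for code that may talk to both pre- and post-rename daemons:

```python
import typing

def added_timestamp(file_info: typing.Dict[str, typing.Any]) -> typing.Optional[int]:
    # Prefer the renamed key; fall back to the old one for older daemons.
    return file_info.get("added_on", file_info.get("added_at"))
```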
@@ -13,7 +13,7 @@ def do_migration(conf):
for col_info in cursor.execute("pragma table_info('file');").fetchall():
current_columns.append(col_info[1])
- if 'added_at' in current_columns:
+ if 'added_on' in current_columns:
connection.close()
print('already migrated')
return
@@ -32,7 +32,7 @@ def do_migration(conf):
status text not null,
saved_file integer not null,
content_fee text,
- added_at integer not null
+ added_on integer not null
);
@@ -42,10 +42,10 @@ def do_migration(conf):
select = "select * from file"
for (stream_hash, file_name, download_dir, data_rate, blob_rate, status, saved_file, fee) \
in cursor.execute(select).fetchall():
- added_at = int(time.time())
+ added_on = int(time.time())
cursor.execute(
"insert into new_file values (?, ?, ?, ?, ?, ?, ?, ?)",
- (stream_hash, file_name, download_dir, data_rate, blob_rate, status, saved_file, fee, added_at)
+ (stream_hash, file_name, download_dir, data_rate, blob_rate, status, saved_file, fee, added_on)
)
# step 6: drop old table

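The hunk above sits inside the usual SQLite column-rename dance (SQLite before 3.25 has no ALTER TABLE ... RENAME COLUMN): create a replacement table with the new added_on column, copy every row while backfilling the timestamp with int(time.time()), then drop the old table. A condensed, hypothetical sketch of that shape, with the schema reduced to two columns:

```python
import sqlite3
import time

def rename_added_at_to_added_on(db_path: str) -> None:
    """Hypothetical, condensed form of the migration above (schema reduced)."""
    connection = sqlite3.connect(db_path)
    cursor = connection.cursor()
    # Skip if a previous run already renamed the column.
    columns = [row[1] for row in cursor.execute("pragma table_info('file');").fetchall()]
    if "added_on" in columns:
        connection.close()
        return
    # Build the replacement table with the new column name.
    cursor.execute(
        "create table new_file (stream_hash text primary key, added_on integer not null)"
    )
    # Copy every row, backfilling the insertion time the way the diff does.
    for (stream_hash,) in cursor.execute("select stream_hash from file").fetchall():
        cursor.execute("insert into new_file values (?, ?)", (stream_hash, int(time.time())))
    # Drop the old table and move the replacement into place.
    cursor.execute("drop table file")
    cursor.execute("alter table new_file rename to file")
    connection.commit()
    connection.close()
```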
@@ -103,7 +103,7 @@ def get_all_lbry_files(transaction: sqlite3.Connection) -> typing.List[typing.Di
stream_hashes = tuple(
stream_hash for (stream_hash,) in transaction.execute("select stream_hash from file").fetchall()
)
- for (rowid, stream_hash, file_name, download_dir, data_rate, status, saved_file, raw_content_fee, added_at,
+ for (rowid, stream_hash, file_name, download_dir, data_rate, status, saved_file, raw_content_fee, added_on,
_, sd_hash, stream_key, stream_name, suggested_file_name, *claim_args) in _batched_select(
transaction, "select file.rowid, file.*, stream.*, c.* "
"from file inner join stream on file.stream_hash=stream.stream_hash "
@@ -119,7 +119,7 @@ def get_all_lbry_files(transaction: sqlite3.Connection) -> typing.List[typing.Di
files.append(
{
"rowid": rowid,
"added_at": added_at,
"added_on": added_on,
"stream_hash": stream_hash,
"file_name": file_name, # hex
"download_directory": download_dir, # hex
@@ -181,13 +181,13 @@ def delete_stream(transaction: sqlite3.Connection, descriptor: 'StreamDescriptor
def store_file(transaction: sqlite3.Connection, stream_hash: str, file_name: typing.Optional[str],
download_directory: typing.Optional[str], data_payment_rate: float, status: str,
- content_fee: typing.Optional[Transaction], added_at: typing.Optional[int] = None) -> int:
+ content_fee: typing.Optional[Transaction], added_on: typing.Optional[int] = None) -> int:
if not file_name and not download_directory:
encoded_file_name, encoded_download_dir = None, None
else:
encoded_file_name = binascii.hexlify(file_name.encode()).decode()
encoded_download_dir = binascii.hexlify(download_directory.encode()).decode()
- time_added = added_at or int(time.time())
+ time_added = added_on or int(time.time())
transaction.execute(
"insert or replace into file values (?, ?, ?, ?, ?, ?, ?, ?)",
(stream_hash, encoded_file_name, encoded_download_dir, data_payment_rate, status,
@@ -251,7 +251,7 @@ class SQLiteStorage(SQLiteMixin):
status text not null,
saved_file integer not null,
content_fee text,
- added_at integer not null
+ added_on integer not null
);
create table if not exists content_claim (
@@ -454,19 +454,19 @@ class SQLiteStorage(SQLiteMixin):
def save_downloaded_file(self, stream_hash: str, file_name: typing.Optional[str],
download_directory: typing.Optional[str], data_payment_rate: float,
content_fee: typing.Optional[Transaction] = None,
- added_at: typing.Optional[int] = None) -> typing.Awaitable[int]:
+ added_on: typing.Optional[int] = None) -> typing.Awaitable[int]:
return self.save_published_file(
stream_hash, file_name, download_directory, data_payment_rate, status="running",
- content_fee=content_fee, added_at=added_at
+ content_fee=content_fee, added_on=added_on
)
def save_published_file(self, stream_hash: str, file_name: typing.Optional[str],
download_directory: typing.Optional[str], data_payment_rate: float,
status: str = "finished",
content_fee: typing.Optional[Transaction] = None,
- added_at: typing.Optional[int] = None) -> typing.Awaitable[int]:
+ added_on: typing.Optional[int] = None) -> typing.Awaitable[int]:
return self.db.run(store_file, stream_hash, file_name, download_directory, data_payment_rate, status,
- content_fee, added_at)
+ content_fee, added_on)
async def update_manually_removed_files_since_last_run(self):
"""

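One side effect of renaming the keyword rather than aliasing it: external callers still passing added_at= now fail loudly at the call site. A self-contained illustration against a hypothetical reduction of the signature above:

```python
import typing

def save_downloaded_file(stream_hash: str, added_on: typing.Optional[int] = None) -> None:
    """Hypothetical reduction of the renamed signature above."""

try:
    save_downloaded_file("deadbeef", added_at=1572102277)  # old keyword
except TypeError as err:
    print(err)  # save_downloaded_file() got an unexpected keyword argument 'added_at'
```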
@@ -55,7 +55,7 @@ class ManagedStream:
'sd_hash',
'download_directory',
'_file_name',
- '_added_at',
+ '_added_on',
'_status',
'stream_claim_info',
'download_id',
@@ -82,7 +82,7 @@ class ManagedStream:
descriptor: typing.Optional[StreamDescriptor] = None,
content_fee: typing.Optional['Transaction'] = None,
analytics_manager: typing.Optional['AnalyticsManager'] = None,
- added_at: typing.Optional[int] = None):
+ added_on: typing.Optional[int] = None):
self.loop = loop
self.config = config
self.blob_manager = blob_manager
@@ -94,7 +94,7 @@ class ManagedStream:
self.download_id = download_id or binascii.hexlify(generate_id()).decode()
self.rowid = rowid
self.content_fee = content_fee
- self._added_at = added_at
+ self._added_on = added_on
self.downloader = StreamDownloader(self.loop, self.config, self.blob_manager, sd_hash, descriptor)
self.analytics_manager = analytics_manager
@@ -122,8 +122,8 @@ class ManagedStream:
return self._file_name or (self.descriptor.suggested_file_name if self.descriptor else None)
@property
- def added_at(self) -> typing.Optional[int]:
- return self._added_at
+ def added_on(self) -> typing.Optional[int]:
+ return self._added_on
@property
def status(self) -> str:
@@ -261,9 +261,9 @@ class ManagedStream:
file_name, download_dir = self._file_name, self.download_directory
else:
file_name, download_dir = None, None
- self._added_at = int(time.time())
+ self._added_on = int(time.time())
self.rowid = await self.blob_manager.storage.save_downloaded_file(
- self.stream_hash, file_name, download_dir, 0.0, added_at=self._added_at
+ self.stream_hash, file_name, download_dir, 0.0, added_on=self._added_on
)
if self.status != self.STATUS_RUNNING:
await self.update_status(self.STATUS_RUNNING)

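The ManagedStream changes preserve the existing lazy pattern: _added_on stays None for a stream constructed in memory and is stamped exactly once, when the file row is first persisted. A hypothetical reduction of that behavior:

```python
import time
import typing

class TimestampedRecord:
    """Hypothetical reduction of ManagedStream's added_on handling."""

    def __init__(self, added_on: typing.Optional[int] = None):
        self._added_on = added_on  # None until the row is first saved

    @property
    def added_on(self) -> typing.Optional[int]:
        return self._added_on

    def mark_saved(self) -> None:
        # Stamp the insertion time once, right before the row is written,
        # mirroring save_downloaded_file in the diff above.
        self._added_on = int(time.time())
```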
@@ -31,7 +31,7 @@ filter_fields = [
'rowid',
'status',
'file_name',
- 'added_at',
+ 'added_on',
'sd_hash',
'stream_hash',
'claim_name',
@@ -117,7 +117,7 @@ class StreamManager:
async def add_stream(self, rowid: int, sd_hash: str, file_name: typing.Optional[str],
download_directory: typing.Optional[str], status: str,
claim: typing.Optional['StoredStreamClaim'], content_fee: typing.Optional['Transaction'],
- added_at: typing.Optional[int]):
+ added_on: typing.Optional[int]):
try:
descriptor = await self.blob_manager.get_stream_descriptor(sd_hash)
except InvalidStreamDescriptorError as err:
@@ -126,7 +126,7 @@ class StreamManager:
stream = ManagedStream(
self.loop, self.config, self.blob_manager, descriptor.sd_hash, download_directory, file_name, status,
claim, content_fee=content_fee, rowid=rowid, descriptor=descriptor,
- analytics_manager=self.analytics_manager, added_at=added_at
+ analytics_manager=self.analytics_manager, added_on=added_on
)
self.streams[sd_hash] = stream
self.storage.content_claim_callbacks[stream.stream_hash] = lambda: self._update_content_claim(stream)
@@ -159,7 +159,7 @@ class StreamManager:
file_info['rowid'], file_info['sd_hash'], file_name,
download_directory, file_info['status'],
file_info['claim'], file_info['content_fee'],
- file_info['added_at']
+ file_info['added_on']
)))
if add_stream_tasks:
await asyncio.gather(*add_stream_tasks, loop=self.loop)