2018-10-30 17:52:52 +01:00
|
|
|
import logging
|
2018-08-16 01:23:06 +02:00
|
|
|
from decimal import Decimal
|
2019-06-03 22:37:21 +02:00
|
|
|
from binascii import hexlify, unhexlify
|
2018-08-16 01:23:06 +02:00
|
|
|
from datetime import datetime
|
|
|
|
from json import JSONEncoder
|
2019-04-29 06:38:58 +02:00
|
|
|
|
|
|
|
from google.protobuf.message import DecodeError
|
|
|
|
|
2019-06-21 02:55:47 +02:00
|
|
|
from lbry.schema.claim import Claim
|
2020-06-05 21:19:14 +02:00
|
|
|
from lbry.schema.support import Support
|
2020-02-11 03:15:18 +01:00
|
|
|
from lbry.torrent.torrent_manager import TorrentSource
|
2020-01-03 04:18:49 +01:00
|
|
|
from lbry.wallet import Wallet, Ledger, Account, Transaction, Output
|
|
|
|
from lbry.wallet.bip32 import PubKey
|
2019-06-21 02:55:47 +02:00
|
|
|
from lbry.wallet.dewies import dewies_to_lbc
|
|
|
|
from lbry.stream.managed_stream import ManagedStream
|
2018-08-16 01:23:06 +02:00
|
|
|
|
|
|
|
|
2018-10-30 17:52:52 +01:00
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2019-04-06 21:33:07 +02:00
|
|
|
def encode_txo_doc():
    """Field-by-field documentation for an encoded transaction output (txo)."""
    # Built with dict() keyword arguments; insertion order matches the
    # original literal so rendered API docs keep the same field order.
    return dict(
        txid="hash of transaction in hex",
        nout="position in the transaction",
        height="block where transaction was recorded",
        amount="value of the txo as a decimal",
        address="address of who can spend the txo",
        confirmations="number of confirmed blocks",
        is_change="payment to change address, only available when it can be determined",
        is_received="true if txo was sent from external account to this account",
        is_spent="true if txo is spent",
        is_mine="payment to one of your accounts, only available when it can be determined",
        type="one of 'claim', 'support' or 'purchase'",
        name="when type is 'claim' or 'support', this is the claim name",
        claim_id="when type is 'claim', 'support' or 'purchase', this is the claim id",
        claim_op="when type is 'claim', this determines if it is 'create' or 'update'",
        value="when type is 'claim' or 'support' with payload, this is the decoded protobuf payload",
        value_type="determines the type of the 'value' field: 'channel', 'stream', etc",
        protobuf="hex encoded raw protobuf version of 'value' field",
        permanent_url="when type is 'claim' or 'support', this is the long permanent claim URL",
        claim="for purchase outputs only, metadata of purchased claim",
        reposted_claim="for repost claims only, metadata of claim being reposted",
        signing_channel="for signed claims only, metadata of signing channel",
        is_channel_signature_valid="for signed claims only, whether signature is valid",
        purchase_receipt="metadata for the purchase transaction associated with this claim",
    )
|
|
|
|
|
|
|
|
|
|
|
|
def encode_tx_doc():
    """Field-by-field documentation for an encoded transaction."""
    # Keyword-argument construction; key order mirrors the original literal.
    return dict(
        txid="hash of transaction in hex",
        height="block where transaction was recorded",
        inputs=[encode_txo_doc()],
        outputs=[encode_txo_doc()],
        total_input="sum of inputs as a decimal",
        total_output="sum of outputs, sans fee, as a decimal",
        total_fee="fee amount",
        hex="entire transaction encoded in hex",
    )
|
|
|
|
|
|
|
|
|
|
|
|
def encode_account_doc():
    """Field-by-field documentation for an encoded wallet account."""
    return dict(
        id='account_id',
        is_default='this account is used by default',
        ledger='name of crypto currency and network',
        name='optional account name',
        seed='human friendly words from which account can be recreated',
        encrypted='if account is encrypted',
        private_key='extended private key',
        public_key='extended public key',
        address_generator='settings for generating addresses',
        modified_on='date of last modification to account settings',
    )
|
|
|
|
|
|
|
|
|
2019-09-20 15:43:58 +02:00
|
|
|
def encode_wallet_doc():
    """Field-by-field documentation for an encoded wallet."""
    return dict(
        id='wallet_id',
        name='optional wallet name',
    )
|
|
|
|
|
|
|
|
|
2019-04-06 21:33:07 +02:00
|
|
|
def encode_file_doc():
    """Field-by-field documentation for an encoded managed file/stream."""
    # Keyword-argument construction; key order mirrors the original literal.
    return dict(
        streaming_url='(str) url to stream the file using range requests',
        completed='(bool) true if download is completed',
        file_name='(str) name of file',
        download_directory='(str) download directory',
        points_paid='(float) credit paid to download file',
        stopped='(bool) true if download is stopped',
        stream_hash='(str) stream hash of file',
        stream_name='(str) stream name',
        suggested_file_name='(str) suggested file name',
        sd_hash='(str) sd hash of file',
        download_path='(str) download path of file',
        mime_type='(str) mime type of file',
        key='(str) key attached to file',
        total_bytes_lower_bound='(int) lower bound file size in bytes',
        total_bytes='(int) file upper bound size in bytes',
        written_bytes='(int) written size in bytes',
        blobs_completed='(int) number of fully downloaded blobs',
        blobs_in_stream='(int) total blobs on stream',
        blobs_remaining='(int) total blobs remaining to download',
        status='(str) downloader status',
        claim_id='(str) None if claim is not found else the claim id',
        txid='(str) None if claim is not found else the transaction id',
        nout='(int) None if claim is not found else the transaction output index',
        outpoint='(str) None if claim is not found else the tx and output',
        metadata='(dict) None if claim is not found else the claim metadata',
        channel_claim_id='(str) None if claim is not found or not signed',
        channel_name='(str) None if claim is not found or not signed',
        claim_name='(str) None if claim is not found else the claim name',
        reflector_progress='(int) reflector upload progress, 0 to 100',
        uploading_to_reflector='(bool) set to True when currently uploading to reflector',
    )
|
|
|
|
|
|
|
|
|
2018-08-16 01:23:06 +02:00
|
|
|
class JSONResponseEncoder(JSONEncoder):
    """JSONEncoder that serializes wallet/daemon domain objects for API responses.

    Handles Account, Wallet, Transaction, Output, Claim, Support, managed
    files, PubKey, datetime, Decimal and bytes (see ``default``).
    """

    def __init__(self, *args, ledger: Ledger, include_protobuf=False, **kwargs):
        """
        :param ledger: ledger used to resolve addresses, heights and timestamps
            during encoding (keyword-only).
        :param include_protobuf: when True, encoded claims/supports also carry
            a hex-encoded 'protobuf' field with the raw payload.
        Remaining args/kwargs are forwarded to JSONEncoder.
        """
        super().__init__(*args, **kwargs)
        self.ledger = ledger
        self.include_protobuf = include_protobuf
|
2018-08-16 01:23:06 +02:00
|
|
|
|
2021-08-21 04:36:35 +02:00
|
|
|
def default(self, obj):  # pylint: disable=method-hidden,arguments-renamed
    """Serialize known domain objects; defer everything else to JSONEncoder.

    Dispatch is an ordered table so the first matching type wins, exactly
    mirroring the original chain of isinstance checks.
    """
    handlers = (
        (Account, self.encode_account),
        (Wallet, self.encode_wallet),
        ((ManagedStream, TorrentSource), self.encode_file),
        (Transaction, self.encode_transaction),
        (Output, self.encode_output),
        (Claim, self.encode_claim),
        (Support, lambda o: o.to_dict()),
        (PubKey, lambda o: o.extended_key_string()),
        (datetime, lambda o: o.strftime("%Y%m%dT%H:%M:%S")),
        (Decimal, float),
        (bytes, lambda o: o.decode()),
    )
    for match_types, handler in handlers:
        if isinstance(obj, match_types):
            return handler(obj)
    # Unknown type: let the base class raise its standard TypeError.
    return super().default(obj)
|
|
|
|
|
|
|
|
def encode_transaction(self, tx):
    """Serialize a Transaction into a JSON-compatible dict."""
    input_sum = tx.input_sum
    fee = tx.fee
    encoded = {'txid': tx.id, 'height': tx.height}
    encoded['inputs'] = [self.encode_input(txi) for txi in tx.inputs]
    encoded['outputs'] = [self.encode_output(out) for out in tx.outputs]
    encoded['total_input'] = dewies_to_lbc(input_sum)
    # Total output is derived: everything that came in, minus the fee.
    encoded['total_output'] = dewies_to_lbc(input_sum - fee)
    encoded['total_fee'] = dewies_to_lbc(fee)
    encoded['hex'] = hexlify(tx.raw).decode()
    return encoded
|
|
|
|
|
2019-05-26 19:21:26 +02:00
|
|
|
def encode_output(self, txo, check_signature=True):
    """Serialize a transaction output (txo) to a JSON-compatible dict.

    Returns None for a falsy txo. Adds claim/support/purchase specific
    fields depending on the output's script type.

    :param txo: the Output to encode.
    :param check_signature: when True and the payload is signed, also emit
        'signing_channel' and 'is_channel_signature_valid'.
    """
    if not txo:
        return
    tx_height = txo.tx_ref.height
    best_height = self.ledger.headers.height
    output = {
        'txid': txo.tx_ref.id,
        'nout': txo.position,
        'height': tx_height,
        'amount': dewies_to_lbc(txo.amount),
        'address': txo.get_address(self.ledger) if txo.has_address else None,
        # Unconfirmed/abandoned txs keep their non-positive height as-is.
        'confirmations': (best_height+1) - tx_height if tx_height > 0 else tx_height,
        'timestamp': self.ledger.headers.estimated_timestamp(tx_height)
    }
    # The following attributes are tri-state: None means "unknown", so the
    # key is only emitted when the value could actually be determined.
    if txo.is_spent is not None:
        output['is_spent'] = txo.is_spent
    if txo.is_my_output is not None:
        output['is_my_output'] = txo.is_my_output
    if txo.is_my_input is not None:
        output['is_my_input'] = txo.is_my_input
    if txo.sent_supports is not None:
        output['sent_supports'] = dewies_to_lbc(txo.sent_supports)
    if txo.sent_tips is not None:
        output['sent_tips'] = dewies_to_lbc(txo.sent_tips)
    if txo.received_tips is not None:
        output['received_tips'] = dewies_to_lbc(txo.received_tips)
    if txo.is_internal_transfer is not None:
        output['is_internal_transfer'] = txo.is_internal_transfer

    # Classify the output by script type; order matters, first match wins.
    if txo.script.is_claim_name:
        output['type'] = 'claim'
        output['claim_op'] = 'create'
    elif txo.script.is_update_claim:
        output['type'] = 'claim'
        output['claim_op'] = 'update'
    elif txo.script.is_support_claim:
        output['type'] = 'support'
    elif txo.script.is_return_data:
        output['type'] = 'data'
    elif txo.purchase is not None:
        output['type'] = 'purchase'
        output['claim_id'] = txo.purchased_claim_id
        if txo.purchased_claim is not None:
            output['claim'] = self.encode_output(txo.purchased_claim)
    else:
        output['type'] = 'payment'

    if txo.script.is_claim_involved:
        output.update({
            'name': txo.claim_name,
            'normalized_name': txo.normalized_name,
            'claim_id': txo.claim_id,
            'permanent_url': txo.permanent_url,
            'meta': self.encode_claim_meta(txo.meta.copy())
        })
        # Promote resolve URLs from meta to the top level of the output.
        if 'short_url' in output['meta']:
            output['short_url'] = output['meta'].pop('short_url')
        if 'canonical_url' in output['meta']:
            output['canonical_url'] = output['meta'].pop('canonical_url')
        if txo.claims is not None:
            output['claims'] = [self.encode_output(o) for o in txo.claims]
        if txo.reposted_claim is not None:
            output['reposted_claim'] = self.encode_output(txo.reposted_claim)
        if txo.script.is_claim_name or txo.script.is_update_claim or txo.script.is_support_claim_data:
            # Protobuf payload may be malformed; in that case the extra
            # value/signature fields are simply omitted (see DecodeError).
            try:
                output['value'] = txo.signable
                if self.include_protobuf:
                    output['protobuf'] = hexlify(txo.signable.to_bytes())
                if txo.purchase_receipt is not None:
                    output['purchase_receipt'] = self.encode_output(txo.purchase_receipt)
                if txo.script.is_claim_name or txo.script.is_update_claim:
                    output['value_type'] = txo.claim.claim_type
                    if txo.claim.is_channel:
                        output['has_signing_key'] = txo.has_private_key
                if check_signature and txo.signable.is_signed:
                    if txo.channel is not None:
                        output['signing_channel'] = self.encode_output(txo.channel)
                        output['is_channel_signature_valid'] = txo.is_signed_by(txo.channel, self.ledger)
                    else:
                        # Channel not resolved: report only its id and treat
                        # the signature as unverifiable.
                        output['signing_channel'] = {'channel_id': txo.signable.signing_channel_id}
                        output['is_channel_signature_valid'] = False
            except DecodeError:
                pass
    return output
|
2018-08-16 01:23:06 +02:00
|
|
|
|
2019-05-05 05:24:41 +02:00
|
|
|
def encode_claim_meta(self, meta):
    """Normalize resolve metadata in place: convert integer dewies amounts
    to LBC strings and attach an estimated creation timestamp."""
    for field, raw in meta.items():
        # Only integer *_amount values are converted; values that are
        # already strings are left untouched.
        if field.endswith('_amount') and isinstance(raw, int):
            meta[field] = dewies_to_lbc(raw)
    creation_height = meta.get('creation_height', 0)
    if 0 < creation_height <= self.ledger.headers.height:
        meta['creation_timestamp'] = self.ledger.headers.estimated_timestamp(creation_height)
    return meta
|
|
|
|
|
2018-08-16 01:23:06 +02:00
|
|
|
def encode_input(self, txi):
    """Encode a transaction input.

    When the spent txo is known it is encoded fully (without signature
    checking); otherwise only the reference (txid, nout) is returned.
    """
    source_txo = txi.txo_ref.txo
    if source_txo is not None:
        return self.encode_output(source_txo, False)
    return {
        'txid': txi.txo_ref.tx_ref.id,
        'nout': txi.txo_ref.position
    }
|
2019-04-06 21:33:07 +02:00
|
|
|
|
|
|
|
def encode_account(self, account):
    """Serialize an Account, dropping its certificates and flagging whether
    it is the ledger's default (first) account."""
    encoded = account.to_dict()
    # Certificates (channel signing keys) are never exposed here.
    encoded.pop('certificates', None)
    encoded['id'] = account.id
    encoded['is_default'] = self.ledger.accounts[0] == account
    return encoded
|
|
|
|
|
2020-01-03 07:39:34 +01:00
|
|
|
@staticmethod
def encode_wallet(wallet):
    """Serialize a Wallet down to its identifier and display name."""
    return dict(id=wallet.id, name=wallet.name)
|
|
|
|
|
2019-04-29 05:37:18 +02:00
|
|
|
def encode_file(self, managed_stream):
    """Serialize a managed download (LBRY stream or torrent) to a dict.

    Starts from a template of defaults, then layers on stream-specific
    fields (blob/reflector info) or torrent-specific fields, and finally
    the on-disk paths when the output file exists.
    """
    output_exists = managed_stream.output_file_exists
    # NOTE(review): raises AttributeError if stream_claim_info is missing —
    # presumably callers only encode files with resolved claims; confirm.
    tx_height = managed_stream.stream_claim_info.height
    best_height = self.ledger.headers.height
    # Torrent sources have no stream_hash attribute; that distinguishes them.
    is_stream = hasattr(managed_stream, 'stream_hash')
    if is_stream:
        total_bytes_lower_bound = managed_stream.descriptor.lower_bound_decrypted_length()
        total_bytes = managed_stream.descriptor.upper_bound_decrypted_length()
    else:
        total_bytes_lower_bound = total_bytes = managed_stream.torrent_length
    result = {
        'streaming_url': None,
        'completed': managed_stream.completed,
        'file_name': None,
        'download_directory': None,
        'download_path': None,
        'points_paid': 0.0,
        'stopped': not managed_stream.running,
        'stream_hash': None,
        'stream_name': None,
        'suggested_file_name': None,
        'sd_hash': None,
        'mime_type': None,
        'key': None,
        'total_bytes_lower_bound': total_bytes_lower_bound,
        'total_bytes': total_bytes,
        'written_bytes': managed_stream.written_bytes,
        'blobs_completed': None,
        'blobs_in_stream': None,
        'blobs_remaining': None,
        'status': managed_stream.status,
        'claim_id': managed_stream.claim_id,
        'txid': managed_stream.txid,
        'nout': managed_stream.nout,
        'outpoint': managed_stream.outpoint,
        'metadata': managed_stream.metadata,
        'protobuf': managed_stream.metadata_protobuf,
        'channel_claim_id': managed_stream.channel_claim_id,
        'channel_name': managed_stream.channel_name,
        'claim_name': managed_stream.claim_name,
        'content_fee': managed_stream.content_fee,
        'purchase_receipt': self.encode_output(managed_stream.purchase_receipt),
        'added_on': managed_stream.added_on,
        'height': tx_height,
        # Non-positive heights (unconfirmed) are passed through unchanged.
        'confirmations': (best_height + 1) - tx_height if tx_height > 0 else tx_height,
        'timestamp': self.ledger.headers.estimated_timestamp(tx_height),
        'is_fully_reflected': False,
        'reflector_progress': False,
        'uploading_to_reflector': False
    }
    if is_stream:
        # Stream-specific details: descriptor fields and reflector state.
        result.update({
            'streaming_url': managed_stream.stream_url,
            'stream_hash': managed_stream.stream_hash,
            'stream_name': managed_stream.descriptor.stream_name,
            'suggested_file_name': managed_stream.descriptor.suggested_file_name,
            'sd_hash': managed_stream.descriptor.sd_hash,
            'mime_type': managed_stream.mime_type,
            'key': managed_stream.descriptor.key,
            'blobs_completed': managed_stream.blobs_completed,
            'blobs_in_stream': managed_stream.blobs_in_stream,
            'blobs_remaining': managed_stream.blobs_remaining,
            'is_fully_reflected': managed_stream.is_fully_reflected,
            'reflector_progress': managed_stream.reflector_progress,
            'uploading_to_reflector': managed_stream.uploading_to_reflector
        })
    else:
        # Torrents are streamed straight from disk.
        result.update({
            'streaming_url': f'file://{managed_stream.full_path}',
        })
    if output_exists:
        result.update({
            'file_name': managed_stream.file_name,
            'download_directory': managed_stream.download_directory,
            'download_path': managed_stream.full_path,
        })
    return result
|
2019-04-20 07:12:43 +02:00
|
|
|
|
2019-06-03 22:37:21 +02:00
|
|
|
def encode_claim(self, claim):
    """Encode a Claim protobuf via its typed sub-message's to_dict(),
    deriving 'public_key_id' (an address) for channel claims."""
    claim_dict = getattr(claim, claim.claim_type).to_dict()
    if 'public_key' in claim_dict:
        claim_dict['public_key_id'] = self.ledger.public_key_to_address(
            unhexlify(claim_dict['public_key'])
        )
    return claim_dict
|