import mock
import json
import random
from os import path

from twisted.internet import defer
from twisted.trial import unittest

from faker import Faker

from lbrynet.schema.decode import smart_decode
from lbrynet import conf
from lbrynet.extras.daemon.storage import SQLiteStorage
from lbrynet.extras.daemon.ComponentManager import ComponentManager
from lbrynet.extras.daemon.Components import DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, STREAM_IDENTIFIER_COMPONENT
from lbrynet.extras.daemon.Components import f2d
from lbrynet.extras.daemon.Components import HASH_ANNOUNCER_COMPONENT, REFLECTOR_COMPONENT, UPNP_COMPONENT, BLOB_COMPONENT
from lbrynet.extras.daemon.Components import PEER_PROTOCOL_SERVER_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT
from lbrynet.extras.daemon.Components import RATE_LIMITER_COMPONENT, HEADERS_COMPONENT, FILE_MANAGER_COMPONENT
from lbrynet.extras.daemon.Daemon import Daemon as LBRYDaemon
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.extras.wallet import LbryWalletManager
from torba.client.wallet import Wallet

from lbrynet.p2p.PaymentRateManager import OnlyFreePaymentsManager

from tests import util
from tests.mocks import mock_conf_settings, FakeNetwork, FakeFileManager
from tests.mocks import ExchangeRateManager as DummyExchangeRateManager
from tests.mocks import BTCLBCFeed, USDBTCFeed
from tests.util import is_android

import logging

logging.getLogger("lbryum").setLevel(logging.WARNING)
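
# The classes below are Twisted trial unit tests for the LBRY daemon: cost estimation,
# basic JSON-RPC handlers, and file_list sorting, all run against mocked components.
# It is assumed they are collected by the project's usual unit-test runner (trial);
# this module does not define its own entry point.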


def get_test_daemon(data_rate=None, generous=True, with_fee=False):
    if data_rate is None:
        data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]

    rates = {
        'BTCLBC': {'spot': 3.0, 'ts': util.DEFAULT_ISO_TIME + 1},
        'USDBTC': {'spot': 2.0, 'ts': util.DEFAULT_ISO_TIME + 2}
    }
    component_manager = ComponentManager(
        skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, UPNP_COMPONENT,
                         PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, HASH_ANNOUNCER_COMPONENT,
                         STREAM_IDENTIFIER_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
                         HEADERS_COMPONENT, RATE_LIMITER_COMPONENT],
        file_manager=FakeFileManager
    )
    daemon = LBRYDaemon(component_manager=component_manager)
    daemon.payment_rate_manager = OnlyFreePaymentsManager()
    daemon.wallet_manager = mock.Mock(spec=LbryWalletManager)
    daemon.wallet_manager.wallet = mock.Mock(spec=Wallet)
    daemon.wallet_manager.wallet.use_encryption = False
    daemon.wallet_manager.network = FakeNetwork()
    daemon.storage = mock.Mock(spec=SQLiteStorage)
    market_feeds = [BTCLBCFeed(), USDBTCFeed()]
    daemon.exchange_rate_manager = DummyExchangeRateManager(market_feeds, rates)
    daemon.file_manager = component_manager.get_component(FILE_MANAGER_COMPONENT)

    metadata = {
        "author": "fake author",
        "language": "en",
        "content_type": "fake/format",
        "description": "fake description",
        "license": "fake license",
        "license_url": "fake license url",
        "nsfw": False,
        "sources": {
            "lbry_sd_hash": 'd2b8b6e907dde95245fe6d144d16c2fdd60c4e0c6463ec98'
                            'b85642d06d8e9414e8fcfdcb7cb13532ec5454fb8fe7f280'
        },
        "thumbnail": "fake thumbnail",
        "title": "fake title",
        "ver": "0.0.3"
    }
    if with_fee:
        metadata.update(
            {"fee": {"USD": {"address": "bQ6BGboPV2SpTMEP7wLNiAcnsZiH8ye6eA", "amount": 0.75}}})
    migrated = smart_decode(json.dumps(metadata))
    daemon._resolve = daemon.wallet_manager.resolve = lambda *_: defer.succeed(
        {"test": {'claim': {'value': migrated.claim_dict}}})
    return daemon


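# A worked example of the cost numbers used in TestCostEst (an inferred reading of the
# fake feeds above): the 0.75 USD claim fee set by get_test_daemon(with_fee=True) is
# converted USD -> BTC at the fake USDBTC spot of 2.0 and BTC -> LBC at the fake BTCLBC
# spot of 3.0, giving 0.75 * 2.0 * 3.0 = 4.5 LBC as the expected cost when no data
# payment is charged.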
class TestCostEst(unittest.TestCase):

    def setUp(self):
        mock_conf_settings(self)
        util.resetTime(self)

    @defer.inlineCallbacks
    def test_fee_and_generous_data(self):
        size = 10000000
        correct_result = 4.5
        daemon = get_test_daemon(generous=True, with_fee=True)
        result = yield f2d(daemon.get_est_cost("test", size))
        self.assertEqual(result, correct_result)

    @defer.inlineCallbacks
    def test_fee_and_ungenerous_data(self):
        size = 10000000
        fake_fee_amount = 4.5
        data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]
        correct_result = size / 10 ** 6 * data_rate + fake_fee_amount
        daemon = get_test_daemon(generous=False, with_fee=True)
        result = yield f2d(daemon.get_est_cost("test", size))
        self.assertEqual(result, round(correct_result, 1))

    @defer.inlineCallbacks
    def test_generous_data_and_no_fee(self):
        size = 10000000
        correct_result = 0.0
        daemon = get_test_daemon(generous=True)
        result = yield f2d(daemon.get_est_cost("test", size))
        self.assertEqual(result, correct_result)

    @defer.inlineCallbacks
    def test_ungenerous_data_and_no_fee(self):
        size = 10000000
        data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]
        correct_result = size / 10 ** 6 * data_rate
        daemon = get_test_daemon(generous=False)
        result = yield f2d(daemon.get_est_cost("test", size))
        self.assertEqual(result, round(correct_result, 1))


class TestJsonRpc(unittest.TestCase):

    def setUp(self):
        def noop():
            return None

        mock_conf_settings(self)
        util.resetTime(self)
        self.test_daemon = get_test_daemon()
        self.test_daemon.wallet_manager.is_first_run = False
        self.test_daemon.wallet_manager.get_best_blockhash = noop

    def test_status(self):
        d = defer.maybeDeferred(self.test_daemon.jsonrpc_status)
        d.addCallback(lambda status: self.assertDictContainsSubset({'is_running': False}, status))

    def test_help(self):
        d = defer.maybeDeferred(self.test_daemon.jsonrpc_help, command='status')
        d.addCallback(lambda result: self.assertSubstring('daemon status', result['help']))
        # self.assertSubstring('daemon status', d.result)

    if is_android():
        test_help.skip = "Test cannot pass on Android because PYTHONOPTIMIZE removes the docstrings."


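# The sort options exercised below use the daemon's "field[,direction]" string form
# (e.g. 'points_paid,desc'). The variants with embedded whitespace and the
# 'metadata.author' case appear intended to check that padding is tolerated and that
# nested fields can be addressed with dotted paths.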
class TestFileListSorting(unittest.TestCase):

    def setUp(self):
        mock_conf_settings(self)
        util.resetTime(self)
        self.faker = Faker('en_US')
        self.faker.seed(129)  # contains 3 same points paid (5.9)
        self.test_daemon = get_test_daemon()
        self.test_daemon.file_manager.lbry_files = self._get_fake_lbry_files()

        self.test_points_paid = [
            2.5, 4.8, 5.9, 5.9, 5.9, 6.1, 7.1, 8.2, 8.4, 9.1
        ]
        self.test_file_names = [
            'add.mp3', 'any.mov', 'day.tiff', 'decade.odt', 'different.json', 'hotel.bmp',
            'might.bmp', 'physical.json', 'remember.mp3', 'than.ppt'
        ]
        self.test_authors = [
            'ashlee27', 'bfrederick', 'brittanyhicks', 'davidsonjeffrey', 'heidiherring',
            'jlewis', 'kswanson', 'michelle50', 'richard64', 'xsteele'
        ]
        return self.test_daemon.component_manager.setup()

    @defer.inlineCallbacks
    def test_sort_by_points_paid_no_direction_specified(self):
        sort_options = ['points_paid']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertEqual(self.test_points_paid, [f['points_paid'] for f in file_list])

    @defer.inlineCallbacks
    def test_sort_by_points_paid_ascending(self):
        sort_options = ['points_paid,asc']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertEqual(self.test_points_paid, [f['points_paid'] for f in file_list])

    @defer.inlineCallbacks
    def test_sort_by_points_paid_descending(self):
        sort_options = ['points_paid, desc']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertEqual(list(reversed(self.test_points_paid)), [f['points_paid'] for f in file_list])

    @defer.inlineCallbacks
    def test_sort_by_file_name_no_direction_specified(self):
        sort_options = ['file_name']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertEqual(self.test_file_names, [f['file_name'] for f in file_list])

    @defer.inlineCallbacks
    def test_sort_by_file_name_ascending(self):
        sort_options = ['file_name,\nasc']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertEqual(self.test_file_names, [f['file_name'] for f in file_list])

    @defer.inlineCallbacks
    def test_sort_by_file_name_descending(self):
        sort_options = ['\tfile_name,\n\tdesc']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertEqual(list(reversed(self.test_file_names)), [f['file_name'] for f in file_list])

    @defer.inlineCallbacks
    def test_sort_by_multiple_criteria(self):
        expected = [
            'file_name=different.json, points_paid=9.1',
            'file_name=physical.json, points_paid=8.4',
            'file_name=any.mov, points_paid=8.2',
            'file_name=hotel.bmp, points_paid=7.1',
            'file_name=add.mp3, points_paid=6.1',
            'file_name=decade.odt, points_paid=5.9',
            'file_name=might.bmp, points_paid=5.9',
            'file_name=than.ppt, points_paid=5.9',
            'file_name=remember.mp3, points_paid=4.8',
            'file_name=day.tiff, points_paid=2.5'
        ]
        format_result = lambda f: 'file_name={}, points_paid={}'.format(f['file_name'], f['points_paid'])

        sort_options = ['file_name,asc', 'points_paid,desc']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertEqual(expected, [format_result(r) for r in file_list])

        # Check that the list is not sorted as expected when sorted only by file_name.
        sort_options = ['file_name,asc']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertNotEqual(expected, [format_result(r) for r in file_list])

        # Check that the list is not sorted as expected when sorted only by points_paid.
        sort_options = ['points_paid,desc']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertNotEqual(expected, [format_result(r) for r in file_list])

        # Check that the list is not sorted as expected when not sorted at all.
        file_list = yield self.test_daemon.jsonrpc_file_list()
        self.assertNotEqual(expected, [format_result(r) for r in file_list])

    @defer.inlineCallbacks
    def test_sort_by_nested_field(self):
        extract_authors = lambda file_list: [f['metadata']['author'] for f in file_list]

        sort_options = ['metadata.author']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertEqual(self.test_authors, extract_authors(file_list))

        # Check that the list matches the expected in reverse when sorting in descending order.
        sort_options = ['metadata.author,desc']
        file_list = yield self.test_daemon.jsonrpc_file_list(sort=sort_options)
        self.assertEqual(list(reversed(self.test_authors)), extract_authors(file_list))

        # Check that the list is not sorted as expected when not sorted at all.
        file_list = yield self.test_daemon.jsonrpc_file_list()
        self.assertNotEqual(self.test_authors, extract_authors(file_list))

    @defer.inlineCallbacks
    def test_invalid_sort_produces_meaningful_errors(self):
        sort_options = ['meta.author']
        expected_message = "Failed to get 'meta.author', key 'meta' was not found."
        with self.assertRaisesRegex(Exception, expected_message):
            yield self.test_daemon.jsonrpc_file_list(sort=sort_options)

        sort_options = ['metadata.foo.bar']
        expected_message = "Failed to get 'metadata.foo.bar', key 'foo' was not found."
        with self.assertRaisesRegex(Exception, expected_message):
            yield self.test_daemon.jsonrpc_file_list(sort=sort_options)

    def _get_fake_lbry_files(self):
        return [self._get_fake_lbry_file() for _ in range(10)]

    def _get_fake_lbry_file(self):
        lbry_file = mock.Mock(spec=ManagedEncryptedFileDownloader)

        file_path = self.faker.file_path()
        stream_name = self.faker.file_name()
        channel_claim_id = self.faker.sha1()
        channel_name = self.faker.simple_profile()['username']
        faked_attributes = {
            'channel_claim_id': channel_claim_id,
            'channel_name': '@' + channel_name,
            'claim_id': self.faker.sha1(),
            'claim_name': '-'.join(self.faker.words(4)),
            'completed': self.faker.boolean(),
            'download_directory': path.dirname(file_path),
            'download_path': file_path,
            'file_name': path.basename(file_path),
            'key': self.faker.md5(raw_output=True),
            'metadata': {
                'author': channel_name,
                'nsfw': random.randint(0, 1) == 1,
            },
            'mime_type': self.faker.mime_type(),
            'nout': abs(self.faker.pyint()),
            'outpoint': self.faker.md5() + self.faker.md5(),
            'points_paid': self.faker.pyfloat(left_digits=1, right_digits=1, positive=True),
            'sd_hash': self.faker.md5() + self.faker.md5() + self.faker.md5(),
            'stopped': self.faker.boolean(),
            'stream_hash': self.faker.md5() + self.faker.md5() + self.faker.md5(),
            'stream_name': stream_name,
            'suggested_file_name': stream_name,
            'txid': self.faker.md5() + self.faker.md5(),
            'written_bytes': self.faker.pyint(),
        }

        for key in faked_attributes:
            setattr(lbry_file, key, faked_attributes[key])

        return lbry_file