2019-04-21 05:54:34 +02:00
|
|
|
|
import os.path
|
2019-03-23 05:07:22 +01:00
|
|
|
|
import hashlib
|
2019-03-27 21:02:17 +01:00
|
|
|
|
import tempfile
|
2019-04-21 05:54:34 +02:00
|
|
|
|
import logging
|
2019-03-22 23:44:17 +01:00
|
|
|
|
from binascii import unhexlify
|
2019-04-21 05:54:34 +02:00
|
|
|
|
from urllib.request import urlopen
|
2019-02-11 23:45:52 +01:00
|
|
|
|
|
2019-03-23 05:07:22 +01:00
|
|
|
|
import ecdsa
|
|
|
|
|
|
|
|
|
|
from lbrynet.wallet.transaction import Transaction, Output
|
2019-03-24 21:55:04 +01:00
|
|
|
|
from torba.client.errors import InsufficientFundsError
|
2019-03-22 20:38:14 +01:00
|
|
|
|
from lbrynet.schema.compat import OldClaimMessage
|
2019-02-11 23:45:52 +01:00
|
|
|
|
|
2019-02-25 03:48:29 +01:00
|
|
|
|
from lbrynet.testcase import CommandTestCase
|
2019-03-23 05:07:22 +01:00
|
|
|
|
from torba.client.hash import sha256, Base58
|
2019-02-11 23:45:52 +01:00
|
|
|
|
|
|
|
|
|
|
2019-04-21 05:54:34 +02:00
|
|
|
|
log = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
2019-03-24 21:55:04 +01:00
|
|
|
|
class ChannelCommands(CommandTestCase):
    """Integration tests for channel claim commands: name rules, bids, and
    metadata fields (create / update / transfer between accounts)."""

    async def test_create_channel_names(self):
        """Name validation and duplicate-name handling for channel_create."""
        # claim new name
        await self.channel_create('@foo')
        self.assertEqual(len(await self.daemon.jsonrpc_channel_list()), 1)
        await self.assertBalance(self.account, '8.991893')

        # fail to claim duplicate
        with self.assertRaisesRegex(Exception, "You already have a channel under the name '@foo'."):
            await self.channel_create('@foo')

        # fail to claim invalid name
        with self.assertRaisesRegex(Exception, "Channel names must start with '@' symbol."):
            await self.channel_create('foo')

        # nothing's changed after failed attempts
        self.assertEqual(len(await self.daemon.jsonrpc_channel_list()), 1)
        await self.assertBalance(self.account, '8.991893')

        # succeed overriding duplicate restriction
        await self.channel_create('@foo', allow_duplicate_name=True)
        self.assertEqual(len(await self.daemon.jsonrpc_channel_list()), 2)
        await self.assertBalance(self.account, '7.983786')

    async def test_channel_bids(self):
        """Bid amounts: preserved on update, changeable, and funds-checked."""
        # enough funds
        tx = await self.channel_create('@foo', '5.0')
        claim_id = tx['outputs'][0]['claim_id']
        self.assertEqual(len(await self.daemon.jsonrpc_channel_list()), 1)
        await self.assertBalance(self.account, '4.991893')

        # bid preserved on update
        tx = await self.channel_update(claim_id)
        self.assertEqual(tx['outputs'][0]['amount'], '5.0')

        # bid changed on update
        tx = await self.channel_update(claim_id, bid='4.0')
        self.assertEqual(tx['outputs'][0]['amount'], '4.0')

        await self.assertBalance(self.account, '5.991447')

        # not enough funds
        with self.assertRaisesRegex(
                InsufficientFundsError, "Not enough funds to cover this transaction."):
            await self.channel_create('@foo2', '9.0')
        self.assertEqual(len(await self.daemon.jsonrpc_channel_list()), 1)
        await self.assertBalance(self.account, '5.991447')

        # spend exactly amount available, no change
        tx = await self.channel_create('@foo3', '5.981266')
        await self.assertBalance(self.account, '0.0')
        self.assertEqual(len(tx['outputs']), 1) # no change
        self.assertEqual(len(await self.daemon.jsonrpc_channel_list()), 2)

    async def test_setting_channel_fields(self):
        """Round-trips every channel metadata field through create/update,
        including clear/replace modes and moving a channel to another account."""
        # raw kwargs as accepted by channel_create/channel_update
        values = {
            'title': "Cool Channel",
            'description': "Best channel on LBRY.",
            'thumbnail_url': "https://co.ol/thumbnail.png",
            'tags': ["cool", "awesome"],
            'languages': ["en-US"],
            'locations': ['US::Manchester'],
            'email': "human@email.com",
            'website_url': "https://co.ol",
            'cover_url': "https://co.ol/cover.png",
            'featured': ['cafe']
        }
        # the normalized form the daemon is expected to return
        fixed_values = values.copy()
        fixed_values['thumbnail'] = {'url': fixed_values.pop('thumbnail_url')}
        fixed_values['locations'] = [{'country': 'US', 'city': 'Manchester'}]
        fixed_values['cover'] = {'url': fixed_values.pop('cover_url')}

        # create new channel with all fields set
        tx = await self.out(self.channel_create('@bigchannel', **values))
        channel = tx['outputs'][0]['value']
        self.assertEqual(channel, {'public_key': channel['public_key'], **fixed_values})

        # create channel with nothing set
        tx = await self.out(self.channel_create('@lightchannel'))
        channel = tx['outputs'][0]['value']
        self.assertEqual(channel, {'public_key': channel['public_key']})

        # create channel with just a featured claim
        tx = await self.out(self.channel_create('@featurechannel', featured='beef'))
        txo = tx['outputs'][0]
        claim_id, channel = txo['claim_id'], txo['value']
        fixed_values['public_key'] = channel['public_key']
        self.assertEqual(channel, {'public_key': fixed_values['public_key'], 'featured': ['beef']})

        # update channel setting all fields
        tx = await self.out(self.channel_update(claim_id, **values))
        channel = tx['outputs'][0]['value']
        fixed_values['featured'].insert(0, 'beef')  # existing featured claim
        self.assertEqual(channel, fixed_values)

        # clearing and setting featured content
        tx = await self.out(self.channel_update(claim_id, featured='beefcafe', clear_featured=True))
        channel = tx['outputs'][0]['value']
        fixed_values['featured'] = ['beefcafe']
        self.assertEqual(channel, fixed_values)

        # reset signing key
        tx = await self.out(self.channel_update(claim_id, new_signing_key=True))
        channel = tx['outputs'][0]['value']
        self.assertNotEqual(channel['public_key'], fixed_values['public_key'])

        # replace mode (clears everything except public_key)
        tx = await self.out(self.channel_update(claim_id, replace=True, title='foo', email='new@email.com'))
        self.assertEqual(
            tx['outputs'][0]['value'],
            {'public_key': channel['public_key'], 'title': 'foo', 'email': 'new@email.com'}
        )

        # send channel to someone else
        new_account = await self.out(self.daemon.jsonrpc_account_create('second account'))
        account2_id, account2 = new_account['id'], self.daemon.get_account_or_error(new_account['id'])

        # before sending
        self.assertEqual(len(await self.daemon.jsonrpc_channel_list()), 3)
        self.assertEqual(len(await self.daemon.jsonrpc_channel_list(account_id=account2_id)), 0)

        other_address = await account2.receiving.get_or_create_usable_address()
        tx = await self.out(self.channel_update(claim_id, claim_address=other_address))

        # after sending
        self.assertEqual(len(await self.daemon.jsonrpc_channel_list()), 2)
        self.assertEqual(len(await self.daemon.jsonrpc_channel_list(account_id=account2_id)), 1)

        # should not have private key
        txo = (await account2.get_channels())[0]
        self.assertIsNone(txo.private_key)

        # send the private key too
        txoid = f"{tx['outputs'][0]['txid']}:{tx['outputs'][0]['nout']}"
        account2.channel_keys[txoid] = self.account.channel_keys[txoid]

        # now should have private key
        txo = (await account2.get_channels())[0]
        self.assertIsNotNone(txo.private_key)
|
|
|
|
|
|
|
|
|
|
|
2019-03-26 03:06:36 +01:00
|
|
|
|
class StreamCommands(CommandTestCase):
    """Integration tests for stream claim commands (create/update/abandon)
    plus automatic media-type/metadata detection from uploaded files."""

    # directory holding downloaded test fixtures, next to this test module
    files_directory = os.path.join(os.path.dirname(__file__), 'files')
    # public sample video used as a fixture; fetched on demand in setUp()
    video_file_url = 'http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerEscapes.mp4'
    # local path the fixture is cached at between runs
    video_file_name = os.path.join(files_directory, 'ForBiggerEscapes.mp4')
|
|
|
|
|
|
|
|
|
|
def setUp(self):
|
|
|
|
|
if not os.path.exists(self.video_file_name):
|
|
|
|
|
if not os.path.exists(self.files_directory):
|
|
|
|
|
os.mkdir(self.files_directory)
|
|
|
|
|
log.info(f'downloading test video from {self.video_file_name}')
|
|
|
|
|
with urlopen(self.video_file_url) as response,\
|
|
|
|
|
open(self.video_file_name, 'wb') as video_file:
|
|
|
|
|
video_file.write(response.read())
|
|
|
|
|
|
2019-03-26 03:06:36 +01:00
|
|
|
|
    async def test_create_stream_names(self):
        """Name validation and duplicate-name handling for stream_create."""
        # claim new name
        await self.stream_create('foo')
        self.assertEqual(len(await self.daemon.jsonrpc_claim_list()), 1)
        await self.assertBalance(self.account, '8.993893')

        # fail to claim duplicate
        with self.assertRaisesRegex(
                Exception, "You already have a stream claim published under the name 'foo'."):
            await self.stream_create('foo')

        # fail claim starting with @
        with self.assertRaisesRegex(
                Exception, "Stream names cannot start with '@' symbol."):
            await self.stream_create('@foo')

        # nothing's changed after failed attempts
        self.assertEqual(len(await self.daemon.jsonrpc_claim_list()), 1)
        await self.assertBalance(self.account, '8.993893')

        # succeed overriding duplicate restriction
        await self.stream_create('foo', allow_duplicate_name=True)
        self.assertEqual(len(await self.daemon.jsonrpc_claim_list()), 2)
        await self.assertBalance(self.account, '7.987786')
|
|
|
|
|
|
2019-03-26 03:06:36 +01:00
|
|
|
|
    async def test_stream_bids(self):
        """Bid amounts on streams: preserved on update, changeable, funds-checked."""
        # enough funds
        tx = await self.stream_create('foo', '2.0')
        claim_id = tx['outputs'][0]['claim_id']
        self.assertEqual(len(await self.daemon.jsonrpc_claim_list()), 1)
        await self.assertBalance(self.account, '7.993893')

        # bid preserved on update
        tx = await self.stream_update(claim_id)
        self.assertEqual(tx['outputs'][0]['amount'], '2.0')

        # bid changed on update
        tx = await self.stream_update(claim_id, bid='3.0')
        self.assertEqual(tx['outputs'][0]['amount'], '3.0')

        await self.assertBalance(self.account, '6.993319')

        # not enough funds
        with self.assertRaisesRegex(
                InsufficientFundsError, "Not enough funds to cover this transaction."):
            await self.stream_create('foo2', '9.0')
        self.assertEqual(len(await self.daemon.jsonrpc_claim_list()), 1)
        await self.assertBalance(self.account, '6.993319')

        # spend exactly amount available, no change
        tx = await self.stream_create('foo3', '6.98523')
        await self.assertBalance(self.account, '0.0')
        self.assertEqual(len(tx['outputs']), 1) # no change
        self.assertEqual(len(await self.daemon.jsonrpc_claim_list()), 2)
|
|
|
|
|
|
|
|
|
|
    async def test_publishing_checks_all_accounts_for_channel(self):
        """Channel lookup during publish: defaults to all accounts, can be
        restricted with channel_account_id, and fails for the wrong account."""
        account1_id, account1 = self.account.id, self.account
        new_account = await self.out(self.daemon.jsonrpc_account_create('second account'))
        account2_id, account2 = new_account['id'], self.daemon.get_account_or_error(new_account['id'])

        await self.out(self.channel_create('@spam', '1.0'))
        self.assertEqual('8.989893', await self.daemon.jsonrpc_account_balance())

        # fund the second account so it can create its own channel
        result = await self.out(self.daemon.jsonrpc_account_send(
            '5.0', await self.daemon.jsonrpc_address_unused(account2_id)
        ))
        await self.confirm_tx(result['txid'])

        self.assertEqual('3.989769', await self.daemon.jsonrpc_account_balance())
        self.assertEqual('5.0', await self.daemon.jsonrpc_account_balance(account2_id))

        baz_tx = await self.out(self.channel_create('@baz', '1.0', account_id=account2_id))
        baz_id = baz_tx['outputs'][0]['claim_id']

        # each account sees only its own channel
        channels = await self.out(self.daemon.jsonrpc_channel_list(account1_id))
        self.assertEqual(len(channels), 1)
        self.assertEqual(channels[0]['name'], '@spam')
        self.assertEqual(channels, await self.out(self.daemon.jsonrpc_channel_list()))

        channels = await self.out(self.daemon.jsonrpc_channel_list(account2_id))
        self.assertEqual(len(channels), 1)
        self.assertEqual(channels[0]['name'], '@baz')

        # defaults to using all accounts to lookup channel
        await self.stream_create('hovercraft1', '0.1', channel_id=baz_id)
        self.assertEqual((await self.claim_search('hovercraft1'))[0]['channel_name'], '@baz')
        # lookup by channel_name in all accounts
        await self.stream_create('hovercraft2', '0.1', channel_name='@baz')
        self.assertEqual((await self.claim_search('hovercraft2'))[0]['channel_name'], '@baz')
        # uses only the specific accounts which contains the channel
        await self.stream_create('hovercraft3', '0.1', channel_id=baz_id, channel_account_id=[account2_id])
        self.assertEqual((await self.claim_search('hovercraft3'))[0]['channel_name'], '@baz')
        # lookup by channel_name in specific account
        await self.stream_create('hovercraft4', '0.1', channel_name='@baz', channel_account_id=[account2_id])
        self.assertEqual((await self.claim_search('hovercraft4'))[0]['channel_name'], '@baz')
        # fails when specifying account which does not contain channel
        with self.assertRaisesRegex(ValueError, "Couldn't find channel with channel_id"):
            await self.stream_create(
                'hovercraft5', '0.1', channel_id=baz_id, channel_account_id=[account1_id]
            )
        # fail with channel_name
        with self.assertRaisesRegex(ValueError, "Couldn't find channel with channel_name '@baz'"):
            await self.stream_create(
                'hovercraft5', '0.1', channel_name='@baz', channel_account_id=[account1_id]
            )
|
|
|
|
|
|
2019-04-27 18:31:51 +02:00
|
|
|
|
    async def test_preview_works_with_signed_streams(self):
        """Preview (unbroadcast) stream creation still produces a valid channel signature."""
        await self.out(self.channel_create('@spam', '1.0'))
        signed = await self.out(self.stream_create('bar', '1.0', channel_name='@spam', preview=True, confirm=False))
        self.assertTrue(signed['outputs'][0]['is_channel_signature_valid'])
|
|
|
|
|
|
2019-04-27 16:29:53 +02:00
|
|
|
|
    async def test_publish_updates_file_list(self):
        """file_list stays in sync with stream_create/stream_update, both for
        metadata-only updates and updates that replace the data itself."""
        tx = await self.out(self.stream_create(title='created'))
        txo = tx['outputs'][0]
        claim_id, expected = txo['claim_id'], txo['value']
        files = self.sout(self.daemon.jsonrpc_file_list())
        self.assertEqual(1, len(files))
        self.assertEqual(tx['txid'], files[0]['txid'])
        self.assertEqual(expected, files[0]['metadata'])

        # update with metadata-only changes
        tx = await self.out(self.stream_update(claim_id, title='update 1'))
        files = self.sout(self.daemon.jsonrpc_file_list())
        expected['title'] = 'update 1'
        self.assertEqual(1, len(files))
        self.assertEqual(tx['txid'], files[0]['txid'])
        self.assertEqual(expected, files[0]['metadata'])

        # update with new data
        tx = await self.out(self.stream_update(claim_id, title='update 2', data=b'updated data'))
        expected = tx['outputs'][0]['value']
        files = self.sout(self.daemon.jsonrpc_file_list())
        self.assertEqual(1, len(files))
        self.assertEqual(tx['txid'], files[0]['txid'])
        self.assertEqual(expected, files[0]['metadata'])
|
|
|
|
|
|
2019-03-30 01:26:10 +01:00
|
|
|
|
    async def test_setting_stream_fields(self):
        """Round-trips every stream metadata field through create/update,
        including clear modes, fee handling, channel re-signing and transfer."""
        # raw kwargs as accepted by stream_create/stream_update
        values = {
            'title': "Cool Content",
            'description': "Best content on LBRY.",
            'thumbnail_url': "https://co.ol/thumbnail.png",
            'tags': ["cool", "awesome"],
            'languages': ["en"],
            'locations': ['US:NH:Manchester:03101:42.990605:-71.460989'],

            'author': "Jules Verne",
            'license': 'Public Domain',
            'license_url': "https://co.ol/license",
            'release_time': 123456,

            'fee_currency': 'usd',
            'fee_amount': '2.99',
            'fee_address': 'mmCsWAiXMUVecFQ3fVzUwvpT9XFMXno2Ca',
        }
        # the normalized form the daemon is expected to return
        fixed_values = values.copy()
        fixed_values['locations'] = [{
            'country': 'US',
            'state': 'NH',
            'city': 'Manchester',
            'code': '03101',
            'latitude': '42.990605',
            'longitude': '-71.460989'
        }]
        fixed_values['thumbnail'] = {'url': fixed_values.pop('thumbnail_url')}
        fixed_values['release_time'] = str(values['release_time'])
        fixed_values['source'] = {
            'hash': '56bf5dbae43f77a63d075b0f2ae9c7c3e3098db93779c7f9840da0f4db9c2f8c8454f4edd1373e2b64ee2e68350d916e',
            'media_type': 'application/octet-stream',
            'size': '3'
        }
        fixed_values['fee'] = {
            'address': fixed_values.pop('fee_address'),
            'amount': fixed_values.pop('fee_amount'),
            'currency': fixed_values.pop('fee_currency').upper()
        }

        # create new stream with all fields set
        tx = await self.out(self.stream_create('big', **values))
        stream = tx['outputs'][0]['value']
        # source name/sd_hash are generated per-publish; copy them in
        fixed_values['source']['name'] = stream['source']['name']
        fixed_values['source']['sd_hash'] = stream['source']['sd_hash']
        self.assertEqual(stream, fixed_values)

        # create stream with nothing set
        tx = await self.out(self.stream_create('light'))
        stream = tx['outputs'][0]['value']
        self.assertEqual(
            stream, {
                'source': {
                    'size': '3',
                    'media_type': 'application/octet-stream',
                    'name': stream['source']['name'],
                    'hash': '56bf5dbae43f77a63d075b0f2ae9c7c3e3098db93779c7f9840da0f4db9c2f8c8454f4edd1373e2b64ee2e68350d916e',
                    'sd_hash': stream['source']['sd_hash']
                },
            }
        )

        # create stream with just some tags, langs and locations
        tx = await self.out(self.stream_create('updated', tags='blah', languages='uk', locations='UA::Kyiv'))
        txo = tx['outputs'][0]
        claim_id, stream = txo['claim_id'], txo['value']
        fixed_values['source']['name'] = stream['source']['name']
        fixed_values['source']['sd_hash'] = stream['source']['sd_hash']
        self.assertEqual(
            stream, {
                'source': {
                    'size': '3',
                    'media_type': 'application/octet-stream',
                    'name': fixed_values['source']['name'],
                    'hash': '56bf5dbae43f77a63d075b0f2ae9c7c3e3098db93779c7f9840da0f4db9c2f8c8454f4edd1373e2b64ee2e68350d916e',
                    'sd_hash': fixed_values['source']['sd_hash'],
                },
                'tags': ['blah'],
                'languages': ['uk'],
                'locations': [{'country': 'UA', 'city': 'Kyiv'}]
            }
        )

        # update stream setting all fields, 'source' doesn't change
        tx = await self.out(self.stream_update(claim_id, **values))
        stream = tx['outputs'][0]['value']
        fixed_values['tags'].insert(0, 'blah')  # existing tag
        fixed_values['languages'].insert(0, 'uk')  # existing language
        fixed_values['locations'].insert(0, {'country': 'UA', 'city': 'Kyiv'})  # existing location
        self.assertEqual(stream, fixed_values)

        # clearing and setting tags, languages and locations
        tx = await self.out(self.stream_update(
            claim_id, tags='single', clear_tags=True,
            languages='pt', clear_languages=True,
            locations='BR', clear_locations=True,
        ))
        txo = tx['outputs'][0]
        fixed_values['tags'] = ['single']
        fixed_values['languages'] = ['pt']
        fixed_values['locations'] = [{'country': 'BR'}]
        self.assertEqual(txo['value'], fixed_values)

        # clearing fee
        tx = await self.out(self.stream_update(claim_id, clear_fee=True))
        txo = tx['outputs'][0]
        del fixed_values['fee']
        self.assertEqual(txo['value'], fixed_values)

        # modifying hash/size/name
        fixed_values['source']['name'] = 'changed_name'
        fixed_values['source']['hash'] = 'cafebeef'
        fixed_values['source']['size'] = '42'
        tx = await self.out(self.stream_update(
            claim_id, file_name='changed_name', file_hash='cafebeef', file_size=42
        ))
        self.assertEqual(tx['outputs'][0]['value'], fixed_values)

        # stream_update re-signs with the same channel
        channel_id = (await self.channel_create('@chan'))['outputs'][0]['claim_id']
        tx = await self.stream_update(claim_id, channel_id=channel_id)
        self.assertEqual(tx['outputs'][0]['signing_channel']['name'], '@chan')
        tx = await self.stream_update(claim_id, title='channel re-signs')
        self.assertEqual(tx['outputs'][0]['value']['title'], 'channel re-signs')
        self.assertEqual(tx['outputs'][0]['signing_channel']['name'], '@chan')

        # send claim to someone else
        new_account = await self.out(self.daemon.jsonrpc_account_create('second account'))
        account2_id, account2 = new_account['id'], self.daemon.get_account_or_error(new_account['id'])

        # before sending
        self.assertEqual(len(await self.daemon.jsonrpc_claim_list()), 4)
        self.assertEqual(len(await self.daemon.jsonrpc_claim_list(account_id=account2_id)), 0)

        other_address = await account2.receiving.get_or_create_usable_address()
        tx = await self.out(self.stream_update(claim_id, claim_address=other_address))

        # after sending
        self.assertEqual(len(await self.daemon.jsonrpc_claim_list()), 3)
        self.assertEqual(len(await self.daemon.jsonrpc_claim_list(account_id=account2_id)), 1)
|
2019-03-24 21:55:04 +01:00
|
|
|
|
|
2019-04-22 04:12:02 +02:00
|
|
|
|
    async def test_automatic_type_and_metadata_detection_for_image(self):
        """Publishing a PNG auto-detects media_type, stream_type and dimensions."""
        with tempfile.NamedTemporaryFile(suffix='.png') as file:
            # minimal valid 5x7 PNG, written as raw bytes
            file.write(unhexlify(
                b'89504e470d0a1a0a0000000d49484452000000050000000708020000004fc'
                b'510b9000000097048597300000b1300000b1301009a9c1800000015494441'
                b'5408d763fcffff3f031260624005d4e603004c45030b5286e9ea000000004'
                b'9454e44ae426082'
            ))
            file.flush()
            tx = await self.out(
                self.daemon.jsonrpc_stream_create(
                    'blank-image', '1.0', file_path=file.name
                )
            )
            txo = tx['outputs'][0]
            self.assertEqual(
                txo['value'], {
                    'source': {
                        'size': '99',
                        'name': os.path.basename(file.name),
                        'media_type': 'image/png',
                        'hash': '6c7df435d412c603390f593ef658c199817c7830ba3f16b7eadd8f99fa50e85dbd0d2b3dc61eadc33fe096e3872d1545',
                        'sd_hash': txo['value']['source']['sd_hash'],
                    },
                    'stream_type': 'image',
                    'image': {
                        'width': 5,
                        'height': 7
                    }
                }
            )
|
|
|
|
|
|
|
|
|
|
    async def test_automatic_type_and_metadata_detection_for_video(self):
        """Publishing an MP4 auto-detects media_type, stream_type, size and duration."""
        tx = await self.out(
            self.daemon.jsonrpc_stream_create(
                'chrome', '1.0', file_path=self.video_file_name
            )
        )
        txo = tx['outputs'][0]
        self.assertEqual(
            txo['value'], {
                'source': {
                    'size': '2299653',
                    'name': 'ForBiggerEscapes.mp4',
                    'media_type': 'video/mp4',
                    'hash': '5f6811c83c1616df06f10bf5309ca61edb5ff949a9c1212ce784602d837bfdfc1c3db1e0580ef7bd1dadde41d8acf315',
                    'sd_hash': txo['value']['source']['sd_hash'],
                },
                'stream_type': 'video',
                'video': {
                    'width': 1280,
                    'height': 720,
                    'duration': 15
                }
            }
        )
|
|
|
|
|
|
|
|
|
|
    async def test_overriding_automatic_metadata_detection(self):
        """Explicit width/height/duration kwargs take precedence over detected values."""
        tx = await self.out(
            self.daemon.jsonrpc_stream_create(
                'chrome', '1.0', file_path=self.video_file_name, width=99, height=88, duration=9
            )
        )
        txo = tx['outputs'][0]
        self.assertEqual(
            txo['value'], {
                'source': {
                    'size': '2299653',
                    'name': 'ForBiggerEscapes.mp4',
                    'media_type': 'video/mp4',
                    'hash': '5f6811c83c1616df06f10bf5309ca61edb5ff949a9c1212ce784602d837bfdfc1c3db1e0580ef7bd1dadde41d8acf315',
                    'sd_hash': txo['value']['source']['sd_hash'],
                },
                'stream_type': 'video',
                'video': {
                    'width': 99,
                    'height': 88,
                    'duration': 9
                }
            }
        )
|
|
|
|
|
|
2019-04-27 02:44:38 +02:00
|
|
|
|
    async def test_replace_mode_preserves_source_and_type(self):
        """stream_update(replace=True) drops editable metadata and the channel
        signature but keeps the source file info and detected stream type."""
        expected = {
            'tags': ['blah'],
            'languages': ['uk'],
            'locations': [{'country': 'UA', 'city': 'Kyiv'}],
            'source': {
                'size': '2299653',
                'name': 'ForBiggerEscapes.mp4',
                'media_type': 'video/mp4',
                'hash': '5f6811c83c1616df06f10bf5309ca61edb5ff949a9c1212ce784602d837bfdfc1c3db1e0580ef7bd1dadde41d8acf315',
            },
            'stream_type': 'video',
            'video': {
                'width': 1280,
                'height': 720,
                'duration': 15
            }
        }
        channel = await self.channel_create('@chan')
        tx = await self.out(self.daemon.jsonrpc_stream_create(
            'chrome', '1.0', file_path=self.video_file_name,
            tags='blah', languages='uk', locations='UA::Kyiv',
            channel_id=channel['outputs'][0]['claim_id']
        ))
        await self.on_transaction_dict(tx)
        txo = tx['outputs'][0]
        expected['source']['sd_hash'] = txo['value']['source']['sd_hash']
        self.assertEqual(txo['value'], expected)
        self.assertEqual(txo['signing_channel']['name'], '@chan')
        tx = await self.out(self.daemon.jsonrpc_stream_update(
            txo['claim_id'], title='new title', replace=True
        ))
        txo = tx['outputs'][0]
        expected['title'] = 'new title'
        # replace mode clears previously-set editable fields...
        del expected['tags']
        del expected['languages']
        del expected['locations']
        self.assertEqual(txo['value'], expected)
        # ...and the channel signature is gone too
        self.assertNotIn('signing_channel', txo)
|
2019-04-27 02:44:38 +02:00
|
|
|
|
|
2019-04-21 05:54:34 +02:00
|
|
|
|
async def test_create_update_and_abandon_stream(self):
    """Exercise the full claim lifecycle (create, update, abandon).

    After each step, checks the transaction_list entry (claim_info /
    update_info / abandon_info), the exact fee, and the resulting account
    balance. The balance figures are exact because the test starts from a
    known 10.0 LBC funding.
    """
    await self.assertBalance(self.account, '10.0')

    tx = await self.stream_create(bid='2.5')  # creates new claim
    claim_id = tx['outputs'][0]['claim_id']
    txs = await self.out(self.daemon.jsonrpc_transaction_list())
    # The newest transaction (txs[0]) must record the claim creation.
    self.assertEqual(len(txs[0]['claim_info']), 1)
    self.assertEqual(txs[0]['confirmations'], 1)
    self.assertEqual(txs[0]['claim_info'][0]['balance_delta'], '-2.5')
    self.assertEqual(txs[0]['claim_info'][0]['claim_id'], claim_id)
    self.assertEqual(txs[0]['value'], '0.0')
    self.assertEqual(txs[0]['fee'], '-0.020107')
    # 10.0 - 2.5 bid - 0.020107 fee
    await self.assertBalance(self.account, '7.479893')
    self.assertEqual(1, len(self.daemon.jsonrpc_file_list()))

    # Deleting the local file must not affect the claim on-chain.
    await self.daemon.jsonrpc_file_delete(delete_all=True)
    self.assertEqual(0, len(self.daemon.jsonrpc_file_list()))

    await self.stream_update(claim_id, bid='1.0')  # updates previous claim
    txs = await self.out(self.daemon.jsonrpc_transaction_list())
    self.assertEqual(len(txs[0]['update_info']), 1)
    # Bid lowered from 2.5 to 1.0, so 1.5 flows back to the account.
    self.assertEqual(txs[0]['update_info'][0]['balance_delta'], '1.5')
    self.assertEqual(txs[0]['update_info'][0]['claim_id'], claim_id)
    self.assertEqual(txs[0]['value'], '0.0')
    self.assertEqual(txs[0]['fee'], '-0.0002165')
    await self.assertBalance(self.account, '8.9796765')

    await self.stream_abandon(claim_id)
    txs = await self.out(self.daemon.jsonrpc_transaction_list())
    self.assertEqual(len(txs[0]['abandon_info']), 1)
    # Abandoning releases the remaining 1.0 bid.
    self.assertEqual(txs[0]['abandon_info'][0]['balance_delta'], '1.0')
    self.assertEqual(txs[0]['abandon_info'][0]['claim_id'], claim_id)
    self.assertEqual(txs[0]['value'], '0.0')
    self.assertEqual(txs[0]['fee'], '-0.000107')
    await self.assertBalance(self.account, '9.9795695')
2019-04-21 05:54:34 +02:00
|
|
|
|
async def test_abandoning_stream_at_loss(self):
    """Abandon a claim whose bid is smaller than the abandon fee.

    With a 0.0001 bid, the fee for abandoning exceeds the reclaimed bid, so
    the final balance is lower than the pre-abandon balance.
    """
    await self.assertBalance(self.account, '10.0')
    tx = await self.stream_create(bid='0.0001')
    await self.assertBalance(self.account, '9.979793')
    await self.stream_abandon(tx['outputs'][0]['claim_id'])
    # Net loss: the abandon fee outweighs the reclaimed 0.0001 bid.
    await self.assertBalance(self.account, '9.97968399')
2019-03-27 21:02:17 +01:00
|
|
|
|
async def test_publish(self):
    """End-to-end test of the `publish` convenience command.

    Covers: required-argument errors on a fresh publish, create-then-update
    on the same name, the ambiguity error when two claims share a name,
    publishing into a channel, and that a later publish without channel
    arguments clears the channel signature.
    """

    # errors on missing arguments to create a stream
    with self.assertRaisesRegex(Exception, "'bid' is a required argument for new publishes."):
        await self.daemon.jsonrpc_publish('foo')

    with self.assertRaisesRegex(Exception, "'file_path' is a required argument for new publishes."):
        await self.daemon.jsonrpc_publish('foo', bid='1.0')

    # successfully create stream
    with tempfile.NamedTemporaryFile() as file:
        file.write(b'hi')
        file.flush()
        tx1 = await self.publish('foo', bid='1.0', file_path=file.name)

    self.assertEqual(1, len(self.daemon.jsonrpc_file_list()))

    # doesn't error on missing arguments when doing an update stream
    tx2 = await self.publish('foo', tags='updated')

    # The update did not create a new file entry or a new claim.
    self.assertEqual(1, len(self.daemon.jsonrpc_file_list()))
    self.assertEqual(
        tx1['outputs'][0]['claim_id'],
        tx2['outputs'][0]['claim_id']
    )

    # update conflict with two claims of the same name
    tx3 = await self.stream_create('foo', allow_duplicate_name=True)
    with self.assertRaisesRegex(Exception, "There are 2 claims for 'foo'"):
        await self.daemon.jsonrpc_publish('foo')

    self.assertEqual(2, len(self.daemon.jsonrpc_file_list()))
    # abandon duplicate stream
    await self.stream_abandon(tx3['outputs'][0]['claim_id'])

    # publish to a channel
    await self.channel_create('@abc')
    tx3 = await self.publish('foo', channel_name='@abc')
    self.assertEqual(2, len(self.daemon.jsonrpc_file_list()))
    r = await self.resolve('lbry://@abc/foo')
    # The channel URL resolves to the claim just published into '@abc'.
    self.assertEqual(
        r['lbry://@abc/foo']['claim']['claim_id'],
        tx3['outputs'][0]['claim_id']
    )

    # publishing again clears channel
    tx4 = await self.publish('foo', languages='uk-UA')
    self.assertEqual(2, len(self.daemon.jsonrpc_file_list()))
    r = await self.resolve('lbry://foo')
    claim = r['lbry://foo']['claim']
    self.assertEqual(claim['txid'], tx4['outputs'][0]['txid'])
    # No channel args were passed, so the claim is no longer signed.
    self.assertNotIn('channel_name', claim)
    self.assertEqual(claim['value']['languages'], ['uk-UA'])
2019-03-26 03:06:36 +01:00
|
|
|
|
async def test_claim_search(self):
    """Exercise `claim_search` lookups, channel filtering and pagination.

    Searches by name, txid/nout and claim_id; verifies abandoned claims
    disappear from results; filters streams by channel (by id, by name and
    by name#id url); and checks pagination ordering over 12 claims spread
    across 3 blocks.
    """
    # search for channel claim
    channel = await self.channel_create('@abc', '1.0')
    channel_id, txid = channel['outputs'][0]['claim_id'], channel['txid']
    value = channel['outputs'][0]['value']

    # Lookup by name.
    claims = await self.claim_search('@abc')
    self.assertEqual(claims[0]['value'], value)

    # Lookup by outpoint (txid + nout).
    claims = await self.claim_search(txid=txid, nout=0)
    self.assertEqual(claims[0]['value'], value)

    # Lookup by claim id.
    claims = await self.claim_search(claim_id=channel_id)
    self.assertEqual(claims[0]['value'], value)

    # Abandoned claims must no longer be found.
    await self.channel_abandon(txid=txid, nout=0)
    self.assertEqual(len(await self.claim_search(txid=txid, nout=0)), 0)

    # search stream claims
    channel = await self.channel_create('@abc', '1.0')
    channel_id, txid = channel['outputs'][0]['claim_id'], channel['txid']

    signed = await self.stream_create('on-channel-claim', '0.0001', channel_id=channel_id)
    unsigned = await self.stream_create('unsigned', '0.0001')

    claims = await self.claim_search('on-channel-claim')
    self.assertEqual(claims[0]['value'], signed['outputs'][0]['value'])

    claims = await self.claim_search('unsigned')
    self.assertEqual(claims[0]['value'], unsigned['outputs'][0]['value'])

    # list streams in a channel
    await self.stream_create('on-channel-claim-2', '0.0001', channel_id=channel_id)
    await self.stream_create('on-channel-claim-3', '0.0001', channel_id=channel_id)

    claims = await self.claim_search(channel_id=channel_id)
    self.assertEqual(len(claims), 3)
    # same is expected using name or name#claim_id urls
    claims = await self.claim_search(channel_name="@abc")
    self.assertEqual(len(claims), 3)
    claims = await self.claim_search(channel_name="@abc", channel_id=channel_id)
    self.assertEqual(len(claims), 3)
    claims = await self.claim_search(channel_name=f"@abc#{channel_id}")
    self.assertEqual(len(claims), 3)

    # Abandon all three streams; the channel search must come back empty.
    await self.stream_abandon(claim_id=claims[0]['claim_id'])
    await self.stream_abandon(claim_id=claims[1]['claim_id'])
    await self.stream_abandon(claim_id=claims[2]['claim_id'])

    claims = await self.claim_search(channel_id=channel_id)
    self.assertEqual(len(claims), 0)

    # Split funds into many small UTXOs so the rapid claim creation below
    # does not run out of spendable outputs.
    tx = await self.daemon.jsonrpc_account_fund(None, None, '0.001', outputs=100, broadcast=True)
    await self.confirm_tx(tx.id)

    # 4 claims per block, 3 blocks. Sorted by height (descending) then claim_id (ascending).
    claims = []
    for j in range(3):
        same_height_claims = []
        # Three unconfirmed claims followed by one confirming claim, so all
        # four land in the same block.
        for k in range(3):
            claim_tx = await self.stream_create(f'c{j}-{k}', '0.000001', channel_id=channel_id, confirm=False)
            same_height_claims.append(claim_tx['outputs'][0]['claim_id'])
            await self.on_transaction_dict(claim_tx)
        claim_tx = await self.stream_create(f'c{j}-4', '0.000001', channel_id=channel_id, confirm=True)
        same_height_claims.append(claim_tx['outputs'][0]['claim_id'])
        # Within a block, results are ordered by numeric claim_id ascending.
        same_height_claims.sort(key=lambda x: int(x, 16))
        # Newest block first (height descending).
        claims = same_height_claims + claims

    # A single page large enough for everything returns the full ordering.
    page = await self.claim_search(page_size=20, channel_id=channel_id)
    page_claim_ids = [item['claim_id'] for item in page]
    self.assertEqual(page_claim_ids, claims)

    # A smaller page returns the prefix of that ordering.
    page = await self.claim_search(page_size=6, channel_id=channel_id)
    page_claim_ids = [item['claim_id'] for item in page]
    self.assertEqual(page_claim_ids, claims[:6])

    # Requesting past the last page yields an empty result, not an error.
    out_of_bounds = await self.claim_search(page=2, page_size=20, channel_id=channel_id)
    self.assertEqual(out_of_bounds, [])
|
|
|
|
async def test_abandoned_channel_with_signed_claims(self):
    """Test resolution of claims signed by a channel that was later abandoned.

    A claim signed by an abandoned channel has an invalid signature: it must
    not resolve through the channel URL, but still resolves directly with
    ``signature_is_valid == False``. After re-creating the channel and
    publishing a valid claim of the same name, the channel URL resolves to
    the valid claim even when a duplicate-name conflict is introduced.
    """
    channel = (await self.channel_create('@abc', '1.0'))['outputs'][0]
    orphan_claim = await self.stream_create('on-channel-claim', '0.0001', channel_id=channel['claim_id'])
    await self.channel_abandon(txid=channel['txid'], nout=0)
    # Re-create the channel under the same name; the old signature is now orphaned.
    channel = (await self.channel_create('@abc', '1.0'))['outputs'][0]
    orphan_claim_id = orphan_claim['outputs'][0]['claim_id']

    # Original channel doesnt exists anymore, so the signature is invalid. For invalid signatures, resolution is
    # only possible outside a channel
    response = await self.resolve('lbry://@abc/on-channel-claim')
    self.assertNotIn('claim', response['lbry://@abc/on-channel-claim'])
    response = await self.resolve('lbry://on-channel-claim')
    self.assertIs(False, response['lbry://on-channel-claim']['claim']['signature_is_valid'])
    direct_uri = 'lbry://on-channel-claim#' + orphan_claim_id
    response = await self.resolve(direct_uri)
    self.assertIs(False, response[direct_uri]['claim']['signature_is_valid'])
    await self.stream_abandon(claim_id=orphan_claim_id)

    uri = 'lbry://@abc/on-channel-claim'
    # now, claim something on this channel (it will update the invalid claim, but we save and forcefully restore)
    valid_claim = await self.stream_create('on-channel-claim', '0.00000001', channel_id=channel['claim_id'])
    # resolves normally
    response = await self.resolve(uri)
    self.assertTrue(response[uri]['claim']['signature_is_valid'])

    # ooops! claimed a valid conflict! (this happens on the wild, mostly by accident or race condition)
    await self.stream_create(
        'on-channel-claim', '0.00000001', channel_id=channel['claim_id'], allow_duplicate_name=True
    )

    # it still resolves! but to the older claim
    response = await self.resolve(uri)
    self.assertTrue(response[uri]['claim']['signature_is_valid'])
    self.assertEqual(response[uri]['claim']['txid'], valid_claim['txid'])
    claims = (await self.daemon.jsonrpc_claim_search('on-channel-claim'))['items']
    self.assertEqual(2, len(claims))
    # Both conflicting claims must be signed by the (re-created) channel.
    # Idiom fix: set comprehension instead of set([...]) (flake8-comprehensions C403).
    signer_ids = {claim['value'].signing_channel_id for claim in claims}
    self.assertEqual({channel['claim_id']}, signer_ids)
2019-02-14 20:54:23 +01:00
|
|
|
|
async def test_normalization_resolution(self):
    """Verify claim-name unicode normalization kicks in after the fork height.

    Two claims whose names differ only by case/normalization resolve to
    distinct claims before the fork; after the chain passes height 250 they
    normalize to the same name and both URLs resolve to the higher-bid claim.
    """

    # this test assumes that the lbrycrd forks normalization at height == 250 on regtest

    # c2 has the larger bid (0.2 > 0.1) so it should win after normalization.
    c1 = await self.stream_create('ΣίσυφοςfiÆ', '0.1')
    c2 = await self.stream_create('ΣΊΣΥΦΟσFIæ', '0.2')

    # Before the fork: each spelling resolves to its own claim.
    r1 = await self.daemon.jsonrpc_resolve(urls='lbry://ΣίσυφοςfiÆ')
    r2 = await self.daemon.jsonrpc_resolve(urls='lbry://ΣΊΣΥΦΟσFIæ')

    r1c = list(r1.values())[0]['claim']['claim_id']
    r2c = list(r2.values())[0]['claim']['claim_id']
    self.assertEqual(c1['outputs'][0]['claim_id'], r1c)
    self.assertEqual(c2['outputs'][0]['claim_id'], r2c)
    self.assertNotEqual(r1c, r2c)

    # Mine past the normalization fork height (250 on regtest).
    await self.generate(50)
    self.assertTrue(self.ledger.headers.height > 250)

    # After the fork: both spellings normalize to one name and resolve to c2.
    r3 = await self.daemon.jsonrpc_resolve(urls='lbry://ΣίσυφοςfiÆ')
    r4 = await self.daemon.jsonrpc_resolve(urls='lbry://ΣΊΣΥΦΟσFIæ')

    r3c = list(r3.values())[0]['claim']['claim_id']
    r4c = list(r4.values())[0]['claim']['claim_id']
    r3n = list(r3.values())[0]['claim']['name']
    r4n = list(r4.values())[0]['claim']['name']

    self.assertEqual(c2['outputs'][0]['claim_id'], r3c)
    self.assertEqual(c2['outputs'][0]['claim_id'], r4c)
    self.assertEqual(r3c, r4c)
    self.assertEqual(r3n, r4n)
|
|
|
|
|
|
async def test_resolve_old_claim(self):
    """Resolve a legacy (pre-protobuf-v2) signed claim.

    Builds an old-format claim signed with the channel's key via
    ``generate_signed_legacy`` and broadcasts it manually; it should resolve
    with a valid signature. A second copy with a corrupted signature must
    resolve directly as invalid and fail to resolve through the channel.
    """
    channel = await self.daemon.jsonrpc_channel_create('@olds', '1.0')
    await self.confirm_tx(channel.id)
    address = channel.outputs[0].get_address(self.account.ledger)
    # Craft an old-schema claim signed by the channel's certificate.
    claim = generate_signed_legacy(address, channel.outputs[0])
    tx = await Transaction.claim_create('example', claim.SerializeToString(), 1, address, [self.account], self.account)
    await tx.sign([self.account])
    await self.broadcast(tx)
    await self.confirm_tx(tx.id)

    response = await self.daemon.jsonrpc_resolve(urls='@olds/example')
    self.assertTrue(response['@olds/example']['claim']['signature_is_valid'])

    # Corrupt the signature (reverse its bytes) and publish again under a new name.
    claim.publisherSignature.signature = bytes(reversed(claim.publisherSignature.signature))
    tx = await Transaction.claim_create(
        'bad_example', claim.SerializeToString(), 1, address, [self.account], self.account
    )
    await tx.sign([self.account])
    await self.broadcast(tx)
    await self.confirm_tx(tx.id)

    # Direct resolution works but flags the signature as invalid...
    response = await self.daemon.jsonrpc_resolve(urls='bad_example')
    self.assertIs(False, response['bad_example']['claim']['signature_is_valid'], response)
    # ...while channel-scoped resolution fails outright.
    response = await self.daemon.jsonrpc_resolve(urls='@olds/bad_example')
    self.assertEqual('URI lbry://@olds/bad_example cannot be resolved', response['@olds/bad_example']['error'])
2019-04-04 05:15:16 +02:00
|
|
|
|
def generate_signed_legacy(address: bytes, output: Output):
    """Build an old-schema (v1) claim message signed by *output*'s channel key.

    Parses a canned legacy claim blob, strips its original publisher
    signature, then re-signs the digest of (decoded address + serialized
    claim + reversed claim hash) with the channel's ECDSA private key.

    :param address: Base58 claim address the signature commits to.
    :param output: channel claim Output providing ``private_key`` and ``claim_hash``.
    :return: the signed ``OldClaimMessage``.
    """
    decoded_address = Base58.decode(address)
    claim = OldClaimMessage()
    # Canned legacy claim fixture (hex-encoded protobuf of a real old claim).
    claim.ParseFromString(unhexlify(
        '080110011aee04080112a604080410011a2b4865726520617265203520526561736f6e73204920e29da4e'
        'fb88f204e657874636c6f7564207c20544c4722920346696e64206f7574206d6f72652061626f7574204e'
        '657874636c6f75643a2068747470733a2f2f6e657874636c6f75642e636f6d2f0a0a596f752063616e206'
        '6696e64206d65206f6e20746865736520736f6369616c733a0a202a20466f72756d733a2068747470733a'
        '2f2f666f72756d2e6865617679656c656d656e742e696f2f0a202a20506f64636173743a2068747470733'
        'a2f2f6f6666746f706963616c2e6e65740a202a2050617472656f6e3a2068747470733a2f2f7061747265'
        '6f6e2e636f6d2f7468656c696e757867616d65720a202a204d657263683a2068747470733a2f2f7465657'
        '37072696e672e636f6d2f73746f7265732f6f6666696369616c2d6c696e75782d67616d65720a202a2054'
        '77697463683a2068747470733a2f2f7477697463682e74762f786f6e64616b0a202a20547769747465723'
        'a2068747470733a2f2f747769747465722e636f6d2f7468656c696e757867616d65720a0a2e2e2e0a6874'
        '7470733a2f2f7777772e796f75747562652e636f6d2f77617463683f763d4672546442434f535f66632a0'
        'f546865204c696e75782047616d6572321c436f7079726967687465642028636f6e746163742061757468'
        '6f722938004a2968747470733a2f2f6265726b2e6e696e6a612f7468756d626e61696c732f46725464424'
        '34f535f666352005a001a41080110011a30040e8ac6e89c061f982528c23ad33829fd7146435bf7a4cc22'
        'f0bff70c4fe0b91fd36da9a375e3e1c171db825bf5d1f32209766964656f2f6d70342a5c080110031a406'
        '2b2dd4c45e364030fbfad1a6fefff695ebf20ea33a5381b947753e2a0ca359989a5cc7d15e5392a0d354c'
        '0b68498382b2701b22c03beb8dcb91089031b871e72214feb61536c007cdf4faeeaab4876cb397feaf6b51'
    ))
    # Drop the fixture's original signature before computing a fresh one.
    claim.ClearField("publisherSignature")
    # Legacy signing digest: address || serialized claim || reversed claim hash.
    digest = sha256(b''.join([
        decoded_address,
        claim.SerializeToString(),
        output.claim_hash[::-1]
    ]))
    private_key = ecdsa.SigningKey.from_pem(output.private_key, hashfunc=hashlib.sha256)
    # Deterministic ECDSA (RFC 6979) so the test is reproducible.
    signature = private_key.sign_digest_deterministic(digest, hashfunc=hashlib.sha256)
    claim.publisherSignature.version = 1
    claim.publisherSignature.signatureType = 1
    claim.publisherSignature.signature = signature
    # Certificate id is the channel claim hash, byte-reversed (legacy convention).
    claim.publisherSignature.certificateId = output.claim_hash[::-1]
    return claim
|
|
|
|
|
|
|
|
|
|
class SupportCommands(CommandTestCase):
    """Tests for the support/tip commands (`support_create`)."""

    async def test_regular_supports_and_tip_supports(self):
        """Verify tips vs. plain supports between two accounts.

        A tip (is_tip=True) transfers funds to the claim owner's account;
        a plain support (is_tip=False) locks the supporter's own funds.
        Checks transaction_list bookkeeping and exact balances on both sides.
        """
        # account2 will be used to send tips and supports to account1
        account2_id = (await self.out(self.daemon.jsonrpc_account_create('second account')))['id']
        account2 = self.daemon.get_account_or_error(account2_id)

        # send account2 5 LBC out of the 10 LBC in account1
        result = await self.out(self.daemon.jsonrpc_account_send(
            '5.0', await self.daemon.jsonrpc_address_unused(account2_id)
        ))
        await self.on_transaction_dict(result)

        # account1 and account2 balances:
        await self.assertBalance(self.account, '4.999876')
        await self.assertBalance(account2, '5.0')

        # create the claim we'll be tipping and supporting
        tx = await self.stream_create()
        claim_id = tx['outputs'][0]['claim_id']

        # account1 and account2 balances:
        await self.assertBalance(self.account, '3.979769')
        await self.assertBalance(account2, '5.0')

        # send a tip to the claim using account2
        tip = await self.out(
            self.daemon.jsonrpc_support_create(claim_id, '1.0', True, account2_id)
        )
        await self.on_transaction_dict(tip)
        await self.generate(1)
        await self.on_transaction_dict(tip)

        # tips don't affect balance so account1 balance is same but account2 balance went down
        await self.assertBalance(self.account, '3.979769')
        await self.assertBalance(account2, '3.9998585')

        # verify that the incoming tip is marked correctly as is_tip=True in account1
        txs = await self.out(self.daemon.jsonrpc_transaction_list())
        self.assertEqual(len(txs[0]['support_info']), 1)
        self.assertEqual(txs[0]['support_info'][0]['balance_delta'], '1.0')
        self.assertEqual(txs[0]['support_info'][0]['claim_id'], claim_id)
        self.assertEqual(txs[0]['support_info'][0]['is_tip'], True)
        self.assertEqual(txs[0]['value'], '1.0')
        self.assertEqual(txs[0]['fee'], '0.0')

        # verify that the outgoing tip is marked correctly as is_tip=True in account2
        txs2 = await self.out(
            self.daemon.jsonrpc_transaction_list(account2_id)
        )
        self.assertEqual(len(txs2[0]['support_info']), 1)
        self.assertEqual(txs2[0]['support_info'][0]['balance_delta'], '-1.0')
        self.assertEqual(txs2[0]['support_info'][0]['claim_id'], claim_id)
        self.assertEqual(txs2[0]['support_info'][0]['is_tip'], True)
        self.assertEqual(txs2[0]['value'], '-1.0')
        self.assertEqual(txs2[0]['fee'], '-0.0001415')

        # send a support to the claim using account2
        support = await self.out(
            self.daemon.jsonrpc_support_create(claim_id, '2.0', False, account2_id)
        )
        await self.on_transaction_dict(support)
        await self.generate(1)
        await self.on_transaction_dict(support)

        # account2 balance went down ~2
        await self.assertBalance(self.account, '3.979769')
        await self.assertBalance(account2, '1.999717')

        # verify that the outgoing support is marked correctly as is_tip=False in account2
        txs2 = await self.out(self.daemon.jsonrpc_transaction_list(account2_id))
        self.assertEqual(len(txs2[0]['support_info']), 1)
        self.assertEqual(txs2[0]['support_info'][0]['balance_delta'], '-2.0')
        self.assertEqual(txs2[0]['support_info'][0]['claim_id'], claim_id)
        self.assertEqual(txs2[0]['support_info'][0]['is_tip'], False)
        # A plain support locks the supporter's own funds, so value is 0.0.
        self.assertEqual(txs2[0]['value'], '0.0')
        self.assertEqual(txs2[0]['fee'], '-0.0001415')