2019-04-21 05:54:34 +02:00
|
|
|
import os.path
|
2019-03-27 21:02:17 +01:00
|
|
|
import tempfile
|
2019-04-21 05:54:34 +02:00
|
|
|
import logging
|
2020-04-06 01:27:13 +02:00
|
|
|
import asyncio
|
2019-03-22 23:44:17 +01:00
|
|
|
from binascii import unhexlify
|
2021-01-19 08:37:31 +01:00
|
|
|
from unittest import skip
|
2019-04-21 05:54:34 +02:00
|
|
|
from urllib.request import urlopen
|
2021-08-31 13:54:04 +02:00
|
|
|
import ecdsa
|
2019-02-11 23:45:52 +01:00
|
|
|
|
2019-12-31 21:30:13 +01:00
|
|
|
from lbry.error import InsufficientFundsError
|
2019-02-11 23:45:52 +01:00
|
|
|
|
2020-01-03 07:44:22 +01:00
|
|
|
from lbry.extras.daemon.daemon import DEFAULT_PAGE_SIZE
|
2019-06-21 03:02:58 +02:00
|
|
|
from lbry.testcase import CommandTestCase
|
2021-02-04 22:49:30 +01:00
|
|
|
from lbry.wallet.orchstr8.node import SPVNode
|
2021-08-31 13:54:04 +02:00
|
|
|
from lbry.wallet.transaction import Transaction, Output
|
2020-03-26 06:13:09 +01:00
|
|
|
from lbry.wallet.util import satoshis_to_coins as lbc
|
2021-08-31 13:54:04 +02:00
|
|
|
from lbry.crypto.hash import sha256
|
2019-02-11 23:45:52 +01:00
|
|
|
|
2019-04-21 05:54:34 +02:00
|
|
|
# Module-level logger, named after this test module.
log = logging.getLogger(__name__)
|
|
|
|
|
2021-08-31 13:54:04 +02:00
|
|
|
|
2022-03-05 22:41:46 +01:00
|
|
|
# Numeric codes the search index assigns to each stream media type.
STREAM_TYPES = {
    media_type: code
    for code, media_type in enumerate(
        ('video', 'audio', 'image', 'document', 'binary', 'model'), start=1
    )
}
|
|
|
|
|
|
|
|
|
2021-08-31 13:54:04 +02:00
|
|
|
def verify(channel, data, signature, channel_hash=None):
    """Return True if `signature` over `data` validates against `channel`'s public key.

    The signed message is signing_ts || channel claim hash || data, hashed
    with sha256 before verification. `channel_hash` overrides the hash taken
    from `channel` when provided.
    """
    signed_message = (
        signature['signing_ts'].encode()
        + (channel_hash or channel.claim_hash)
        + data
    )
    return Output.is_signature_valid(
        unhexlify(signature['signature']),
        sha256(signed_message),
        channel.claim.channel.public_key_bytes
    )
|
|
|
|
|
2019-04-21 05:54:34 +02:00
|
|
|
|
2019-06-04 06:10:59 +02:00
|
|
|
class ClaimTestCase(CommandTestCase):
    """Base class for claim tests: provides a cached sample video and a tiny PNG."""

    files_directory = os.path.join(os.path.dirname(__file__), 'files')
    video_file_url = 'http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ForBiggerEscapes.mp4'
    video_file_name = os.path.join(files_directory, 'ForBiggerEscapes.mp4')
    # Minimal PNG image, stored as hex so the test file stays text-only.
    image_data = unhexlify(
        b'89504e470d0a1a0a0000000d49484452000000050000000708020000004fc'
        b'510b9000000097048597300000b1300000b1301009a9c1800000015494441'
        b'5408d763fcffff3f031260624005d4e603004c45030b5286e9ea000000004'
        b'9454e44ae426082'
    )

    def setUp(self):
        # Download the sample video once; later runs reuse the cached copy.
        if not os.path.exists(self.video_file_name):
            if not os.path.exists(self.files_directory):
                os.mkdir(self.files_directory)
            # Fix: the download source is the URL, not the local file name,
            # and logging args are passed lazily instead of via f-string.
            log.info('downloading test video from %s', self.video_file_url)
            with urlopen(self.video_file_url) as response, \
                    open(self.video_file_name, 'wb') as video_file:
                video_file.write(response.read())
|
|
|
|
|
|
|
|
|
|
|
|
class ClaimSearchCommand(ClaimTestCase):
|
2019-05-06 04:25:43 +02:00
|
|
|
|
|
|
|
async def create_channel(self):
|
|
|
|
self.channel = await self.channel_create('@abc', '1.0')
|
2019-06-24 01:58:41 +02:00
|
|
|
self.channel_id = self.get_claim_id(self.channel)
|
2019-05-06 04:25:43 +02:00
|
|
|
|
2019-10-26 05:34:44 +02:00
|
|
|
    async def create_lots_of_streams(self):
        """Create 24 streams in `self.channel` spread over several blocks.

        `self.streams` ends up holding the claim names sorted by height
        (descending) then name (ascending), matching the default search order.
        """
        # Fund the account with many small UTXOs so claims can be made in bulk.
        tx = await self.daemon.jsonrpc_account_fund(None, None, '0.001', outputs=100, broadcast=True)
        await self.confirm_tx(tx.id)
        # 4 claims per block, 3 blocks. Sorted by height (descending) then claim name (ascending).
        self.streams = []
        for j in range(4):
            same_height_claims = []
            # Five unconfirmed claims that will land in the same block...
            for k in range(5):
                claim_tx = await self.stream_create(
                    f'c{j}-{k}', '0.000001', channel_id=self.channel_id, confirm=False)
                same_height_claims.append(claim_tx['outputs'][0]['name'])
                await self.on_transaction_dict(claim_tx)
            # ...plus one confirmed claim, which mines the block.
            claim_tx = await self.stream_create(
                f'c{j}-6', '0.000001', channel_id=self.channel_id, confirm=True)
            same_height_claims.append(claim_tx['outputs'][0]['name'])
            # Prepend so that newest (highest) blocks come first.
            self.streams = same_height_claims + self.streams
|
|
|
|
|
|
|
|
    async def assertFindsClaim(self, claim, **kwargs):
        """Assert the search described by `kwargs` returns exactly `claim`."""
        await self.assertFindsClaims([claim], **kwargs)
|
|
|
|
|
|
|
|
    async def assertFindsClaims(self, claims, **kwargs):
        """Assert claim_search returns exactly `claims`, in order.

        Unless the caller overrides it, results are ordered by height
        (descending) then name (ascending) to make comparisons deterministic.
        """
        kwargs.setdefault('order_by', ['height', '^name'])
        results = await self.claim_search(**kwargs)
        self.assertEqual(
            len(claims), len(results),
            f"{[claim['outputs'][0]['name'] for claim in claims]} != {[result['name'] for result in results]}")
        # Compare (txid, claim_id) pairs so the failure message can name the claim.
        for claim, result in zip(claims, results):
            self.assertEqual(
                (claim['txid'], self.get_claim_id(claim)),
                (result['txid'], result['claim_id']),
                f"(expected {claim['outputs'][0]['name']}) != (got {result['name']})"
            )
|
|
|
|
|
2021-04-23 16:01:48 +02:00
|
|
|
async def assertListsClaims(self, claims, **kwargs):
|
|
|
|
kwargs.setdefault('order_by', 'height')
|
|
|
|
results = await self.claim_list(**kwargs)
|
|
|
|
self.assertEqual(len(claims), len(results))
|
|
|
|
for claim, result in zip(claims, results):
|
|
|
|
self.assertEqual(
|
|
|
|
(claim['txid'], self.get_claim_id(claim)),
|
|
|
|
(result['txid'], result['claim_id']),
|
|
|
|
f"(expected {claim['outputs'][0]['name']}) != (got {result['name']})"
|
|
|
|
)
|
|
|
|
|
2021-01-19 08:37:31 +01:00
|
|
|
    @skip("doesnt happen on ES...?")
    async def test_disconnect_on_memory_error(self):
        """Oversized claim_search requests should disconnect, not crash the server."""
        # Just under the size limit: returns an empty result set cleanly.
        claim_ids = [
            '0000000000000000000000000000000000000000',
        ] * 23828
        self.assertListEqual([], await self.claim_search(claim_ids=claim_ids))

        # this should do nothing... if the resolve (which is retried) results in the server disconnecting,
        # it kerplodes
        await asyncio.wait_for(self.daemon.jsonrpc_resolve([
            f'0000000000000000000000000000000000000000{i}' for i in range(30000)
        ]), 30)

        # 23829 claim ids makes the request just large enough
        claim_ids = [
            '0000000000000000000000000000000000000000',
        ] * 33829
        with self.assertRaises(ConnectionResetError):
            await self.claim_search(claim_ids=claim_ids)
|
|
|
|
|
2019-05-06 04:25:43 +02:00
|
|
|
    async def test_basic_claim_search(self):
        """Exercise claim_search filters: name, claim_id, txid/nout, public key,
        channel membership and channel-signature validity, plus sd_hash lookup."""
        await self.create_channel()
        channel_txo = self.channel['outputs'][0]
        # Second channel with the same name to test duplicate-name searches.
        channel2 = await self.channel_create('@abc', '0.1', allow_duplicate_name=True)
        channel_txo2 = channel2['outputs'][0]
        channel_id2 = channel_txo2['claim_id']

        # finding a channel
        await self.assertFindsClaims([channel2, self.channel], name='@abc')
        await self.assertFindsClaim(self.channel, name='@abc', is_controlling=True)
        await self.assertFindsClaim(self.channel, claim_id=self.channel_id)
        await self.assertFindsClaim(self.channel, txid=self.channel['txid'], nout=0)
        await self.assertFindsClaim(channel2, claim_id=channel_id2)
        await self.assertFindsClaim(channel2, txid=channel2['txid'], nout=0)
        await self.assertFindsClaim(
            channel2, public_key_id=channel_txo2['value']['public_key_id'])
        await self.assertFindsClaim(
            self.channel, public_key_id=channel_txo['value']['public_key_id'])

        signed = await self.stream_create('on-channel-claim', '0.001', channel_id=self.channel_id)
        signed2 = await self.stream_create('on-channel-claim', '0.0001', channel_id=channel_id2,
                                           allow_duplicate_name=True)
        unsigned = await self.stream_create('unsigned', '0.0001')

        # finding claims with and without a channel
        await self.assertFindsClaims([signed2, signed], name='on-channel-claim')
        await self.assertFindsClaims([signed2, signed], channel_ids=[self.channel_id, channel_id2])
        await self.assertFindsClaim(signed, name='on-channel-claim', channel_ids=[self.channel_id])
        await self.assertFindsClaim(signed2, name='on-channel-claim', channel_ids=[channel_id2])
        await self.assertFindsClaim(unsigned, name='unsigned')
        await self.assertFindsClaim(unsigned, txid=unsigned['txid'], nout=0)
        await self.assertFindsClaim(unsigned, claim_id=self.get_claim_id(unsigned))

        two = await self.stream_create('on-channel-claim-2', '0.0001', channel_id=self.channel_id)
        three = await self.stream_create('on-channel-claim-3', '0.0001', channel_id=self.channel_id)

        # three streams in channel, zero streams in abandoned channel
        claims = [three, two, signed]
        await self.assertFindsClaims(claims, channel_ids=[self.channel_id])
        await self.assertFindsClaims(claims, channel=f"@abc#{self.channel_id}")
        await self.assertFindsClaims(claims, channel=f"@abc#{self.channel_id}", valid_channel_signature=True)
        await self.assertFindsClaims(claims, channel=f"@abc#{self.channel_id}", has_channel_signature=True, valid_channel_signature=True)
        await self.assertFindsClaims([], channel=f"@abc#{self.channel_id}", has_channel_signature=True, invalid_channel_signature=True)  # fixme
        await self.assertFindsClaims([], channel=f"@inexistent")
        await self.assertFindsClaims([three, two, signed2, signed], channel_ids=[channel_id2, self.channel_id])
        # Abandoning the channel invalidates the signatures of its claims.
        await self.channel_abandon(claim_id=self.channel_id)
        await self.assertFindsClaims([], channel=f"@abc#{self.channel_id}", valid_channel_signature=True)
        await self.assertFindsClaims([], channel_ids=[self.channel_id], valid_channel_signature=True)
        await self.assertFindsClaims([signed2], channel_ids=[channel_id2], valid_channel_signature=True)
        # pass `invalid_channel_signature=False` to catch a bug in argument processing
        await self.assertFindsClaims([signed2], channel_ids=[channel_id2, self.channel_id],
                                     valid_channel_signature=True, invalid_channel_signature=False)
        # invalid signature still returns channel_id
        self.ledger._tx_cache.clear()
        invalid_claims = await self.claim_search(invalid_channel_signature=True, has_channel_signature=True)
        self.assertEqual(3, len(invalid_claims))
        self.assertTrue(all([not c['is_channel_signature_valid'] for c in invalid_claims]))
        self.assertEqual({'channel_id': self.channel_id}, invalid_claims[0]['signing_channel'])

        valid_claims = await self.claim_search(valid_channel_signature=True, has_channel_signature=True)
        self.assertEqual(1, len(valid_claims))
        self.assertTrue(all([c['is_channel_signature_valid'] for c in valid_claims]))
        self.assertEqual('@abc', valid_claims[0]['signing_channel']['name'])

        # abandoned stream won't show up for streams in channel search
        await self.stream_abandon(txid=signed2['txid'], nout=0)
        await self.assertFindsClaims([], channel_ids=[channel_id2])
        # resolve by claim ids
        await self.assertFindsClaims([three, two], claim_ids=[self.get_claim_id(three), self.get_claim_id(two)])
        await self.assertFindsClaims([three], claim_id=self.get_claim_id(three))
        await self.assertFindsClaims([three], claim_id=self.get_claim_id(three), text='*')
        # resolve by sd hash
        two_sd_hash = two['outputs'][0]['value']['source']['sd_hash']
        await self.assertFindsClaims([two], sd_hash=two_sd_hash)
        # A short sd_hash prefix also matches.
        await self.assertFindsClaims([two], sd_hash=two_sd_hash[:2])
|
2019-05-06 04:25:43 +02:00
|
|
|
|
2021-03-11 04:45:42 +01:00
|
|
|
    async def test_source_filter(self):
        """Filter claims by whether they carry a file source (has_source/has_no_source),
        covering streams, channels and reposts of each."""
        channel = await self.channel_create('@abc')
        no_source = await self.stream_create('no-source', data=None)
        normal = await self.stream_create('normal', data=b'normal')
        normal_repost = await self.stream_repost(self.get_claim_id(normal), 'normal-repost')
        no_source_repost = await self.stream_repost(self.get_claim_id(no_source), 'no-source-repost')
        channel_repost = await self.stream_repost(self.get_claim_id(channel), 'channel-repost')
        # claim_search resolves reposts to their target's source-ness; claim_list does not.
        await self.assertFindsClaims([channel_repost, no_source_repost, no_source, channel], has_no_source=True)
        await self.assertListsClaims([no_source, channel], has_no_source=True)
        await self.assertFindsClaims([channel_repost, normal_repost, normal, channel], has_source=True)
        await self.assertListsClaims([channel_repost, no_source_repost, normal_repost, normal], has_source=True)
        await self.assertFindsClaims([channel_repost, no_source_repost, normal_repost, normal, no_source, channel])
        await self.assertListsClaims([channel_repost, no_source_repost, normal_repost, normal, no_source, channel])
        await self.assertFindsClaims([normal_repost, normal], stream_types=list(STREAM_TYPES.keys()))
|
2021-03-11 04:45:42 +01:00
|
|
|
|
2019-05-06 04:25:43 +02:00
|
|
|
    async def test_pagination(self):
        """Verify claim_search paging: totals, default page size, custom sizes,
        and out-of-bounds pages."""
        await self.create_channel()
        await self.create_lots_of_streams()

        # with and without totals
        results = await self.daemon.jsonrpc_claim_search()
        self.assertEqual(results['total_pages'], 2)
        self.assertEqual(results['total_items'], 25)
        results = await self.daemon.jsonrpc_claim_search(no_totals=True)
        self.assertNotIn('total_pages', results)
        self.assertNotIn('total_items', results)

        # defaults
        page = await self.claim_search(channel='@abc', order_by=['height', '^name'])
        page_claim_ids = [item['name'] for item in page]
        self.assertEqual(page_claim_ids, self.streams[:DEFAULT_PAGE_SIZE])

        # page with default page_size
        page = await self.claim_search(page=2, channel='@abc', order_by=['height', '^name'])
        page_claim_ids = [item['name'] for item in page]
        self.assertEqual(page_claim_ids, self.streams[DEFAULT_PAGE_SIZE:(DEFAULT_PAGE_SIZE*2)])

        # page_size larger than dataset
        page = await self.claim_search(page_size=50, channel='@abc', order_by=['height', '^name'])
        page_claim_ids = [item['name'] for item in page]
        self.assertEqual(page_claim_ids, self.streams)

        # page_size less than dataset
        page = await self.claim_search(page_size=6, channel='@abc', order_by=['height', '^name'])
        page_claim_ids = [item['name'] for item in page]
        self.assertEqual(page_claim_ids, self.streams[:6])

        # page and page_size
        page = await self.claim_search(page=2, page_size=6, channel='@abc', order_by=['height', '^name'])
        page_claim_ids = [item['name'] for item in page]
        self.assertEqual(page_claim_ids, self.streams[6:12])

        # Requesting past the last page yields an empty list, not an error.
        out_of_bounds = await self.claim_search(page=4, page_size=20, channel='@abc')
        self.assertEqual(out_of_bounds, [])
|
2019-07-08 05:08:39 +02:00
|
|
|
|
2019-05-06 04:25:43 +02:00
|
|
|
    async def test_tag_search(self):
        """Tag filtering: any_tags (OR), all_tags (AND), not_tags (exclusion),
        and combinations. Tags are normalized (case, '#' prefix, whitespace)."""
        claim1 = await self.stream_create('claim1', tags=['aBc'])
        claim2 = await self.stream_create('claim2', tags=['#abc', 'def'])
        claim3 = await self.stream_create('claim3', tags=['abc', 'ghi', 'jkl'])
        claim4 = await self.stream_create('claim4', tags=['abc\t', 'ghi', 'mno'])
        claim5 = await self.stream_create('claim5', tags=['pqr'])

        # any_tags
        await self.assertFindsClaims([claim5, claim4, claim3, claim2, claim1], any_tags=['\tabc', 'pqr'])
        await self.assertFindsClaims([claim4, claim3, claim2, claim1], any_tags=['abc'])
        await self.assertFindsClaims([claim4, claim3, claim2, claim1], any_tags=['abc', 'ghi'])
        await self.assertFindsClaims([claim4, claim3], any_tags=['ghi'])
        await self.assertFindsClaims([claim4, claim3], any_tags=['ghi', 'xyz'])
        await self.assertFindsClaims([], any_tags=['xyz'])

        # all_tags
        await self.assertFindsClaims([], all_tags=['abc', 'pqr'])
        await self.assertFindsClaims([claim4, claim3, claim2, claim1], all_tags=['ABC'])
        await self.assertFindsClaims([claim4, claim3], all_tags=['abc', 'ghi'])
        await self.assertFindsClaims([claim4, claim3], all_tags=['ghi'])
        await self.assertFindsClaims([], all_tags=['ghi', 'xyz'])
        await self.assertFindsClaims([], all_tags=['xyz'])

        # not_tags
        await self.assertFindsClaims([], not_tags=['abc', 'pqr'])
        await self.assertFindsClaims([claim5], not_tags=['abC'])
        await self.assertFindsClaims([claim5], not_tags=['abc', 'ghi'])
        await self.assertFindsClaims([claim5, claim2, claim1], not_tags=['ghi'])
        await self.assertFindsClaims([claim5, claim2, claim1], not_tags=['ghi', 'xyz'])
        await self.assertFindsClaims([claim5, claim4, claim3, claim2, claim1], not_tags=['xyz'])

        # combinations
        await self.assertFindsClaims([claim3], all_tags=['abc', 'ghi'], not_tags=['mno'])
        await self.assertFindsClaims([claim3], all_tags=['abc', 'ghi'], any_tags=['jkl'], not_tags=['mno'])
        await self.assertFindsClaims([claim4, claim3, claim2], all_tags=['abc'], any_tags=['def', 'ghi'])
|
|
|
|
|
2019-05-18 05:54:03 +02:00
|
|
|
    async def test_order_by(self):
        """Ordering and height-range filters ('^' prefix means ascending)."""
        # Each stream_create below confirms a block, so claim i lands at height+1+i.
        height = self.ledger.network.remote_height
        claims = [await self.stream_create(f'claim{i}') for i in range(5)]

        await self.assertFindsClaims(claims, order_by=["^height"])
        await self.assertFindsClaims(list(reversed(claims)), order_by=["height"])

        # Exact and comparison height filters (<, <=, >, >= as string prefixes).
        await self.assertFindsClaims([claims[0]], height=height+1)
        await self.assertFindsClaims([claims[4]], height=height+5)
        await self.assertFindsClaims(claims[:1], height=f'<{height+2}', order_by=["^height"])
        await self.assertFindsClaims(claims[:2], height=f'<={height+2}', order_by=["^height"])
        await self.assertFindsClaims(claims[2:], height=f'>{height+2}', order_by=["^height"])
        await self.assertFindsClaims(claims[1:], height=f'>={height+2}', order_by=["^height"])

        await self.assertFindsClaims(claims, order_by=["^name"])
|
|
|
|
|
2019-06-23 04:25:22 +02:00
|
|
|
    async def test_search_by_fee(self):
        """Fee amount/currency filters; reposts inherit their target's fee."""
        claim1 = await self.stream_create('claim1', fee_amount='1.0', fee_currency='lbc')
        claim2 = await self.stream_create('claim2', fee_amount='0.9', fee_currency='lbc')
        claim3 = await self.stream_create('claim3', fee_amount='0.5', fee_currency='lbc')
        claim4 = await self.stream_create('claim4', fee_amount='0.1', fee_currency='lbc')
        claim5 = await self.stream_create('claim5', fee_amount='1.0', fee_currency='usd')
        repost1 = await self.stream_repost(self.get_claim_id(claim1), 'repost1')
        repost5 = await self.stream_repost(self.get_claim_id(claim5), 'repost5')

        await self.assertFindsClaims([repost5, repost1, claim5, claim4, claim3, claim2, claim1], fee_amount='>0')
        await self.assertFindsClaims([repost1, claim4, claim3, claim2, claim1], fee_currency='lbc')
        await self.assertFindsClaims([repost1, claim3, claim2, claim1], fee_amount='>0.1', fee_currency='lbc')
        await self.assertFindsClaims([claim4, claim3, claim2], fee_amount='<1.0', fee_currency='lbc')
        await self.assertFindsClaims([claim3], fee_amount='0.5', fee_currency='lbc')
        await self.assertFindsClaims([repost5, claim5], fee_currency='usd')
|
2020-09-24 19:00:18 +02:00
|
|
|
|
|
|
|
    async def test_search_by_language(self):
        """Language filters; 'none' matches claims with no language set."""
        claim1 = await self.stream_create('claim1', fee_amount='1.0', fee_currency='lbc')
        claim2 = await self.stream_create('claim2', fee_amount='0.9', fee_currency='lbc')
        claim3 = await self.stream_create('claim3', fee_amount='0.5', fee_currency='lbc', languages='en')
        claim4 = await self.stream_create('claim4', fee_amount='0.1', fee_currency='lbc', languages='en')
        claim5 = await self.stream_create('claim5', fee_amount='1.0', fee_currency='usd', languages='es')

        await self.assertFindsClaims([claim4, claim3], any_languages=['en'])
        await self.assertFindsClaims([claim2, claim1], any_languages=['none'])
        await self.assertFindsClaims([claim4, claim3, claim2, claim1], any_languages=['none', 'en'])
        await self.assertFindsClaims([claim5], any_languages=['es'])
        await self.assertFindsClaims([claim5, claim4, claim3], any_languages=['en', 'es'])
        # Unknown currency matches nothing.
        await self.assertFindsClaims([], fee_currency='foo')
|
|
|
|
|
2019-06-24 01:58:41 +02:00
|
|
|
    async def test_search_by_channel(self):
        """Channel-signature filters combined with not_channel_ids exclusion.

        chan3 is abandoned after claim6 is signed with it, making claim6's
        signature invalid.
        """
        match = self.assertFindsClaims

        chan1_id = self.get_claim_id(await self.channel_create('@chan1'))
        chan2_id = self.get_claim_id(await self.channel_create('@chan2'))
        chan3_id = self.get_claim_id(await self.channel_create('@chan3'))
        chan4 = await self.channel_create('@chan4', '0.1')

        claim1 = await self.stream_create('claim1')
        claim2 = await self.stream_create('claim2', channel_id=chan1_id)
        claim3 = await self.stream_create('claim3', channel_id=chan1_id)
        claim4 = await self.stream_create('claim4', channel_id=chan2_id)
        claim5 = await self.stream_create('claim5', channel_id=chan2_id)
        claim6 = await self.stream_create('claim6', channel_id=chan3_id)
        await self.channel_abandon(chan3_id)

        # {has/valid/invalid}_channel_signature
        await match([claim6, claim5, claim4, claim3, claim2], has_channel_signature=True)
        await match([claim5, claim4, claim3, claim2, claim1], valid_channel_signature=True, claim_type='stream')
        await match([claim6, claim1], invalid_channel_signature=True, claim_type='stream')
        await match([claim5, claim4, claim3, claim2], has_channel_signature=True, valid_channel_signature=True)
        await match([claim6], has_channel_signature=True, invalid_channel_signature=True)

        # not_channel_ids
        await match([claim6, claim5, claim4, claim3, claim2, claim1], not_channel_ids=['abc123'], claim_type='stream')
        await match([claim5, claim4, claim3, claim2, claim1], not_channel_ids=[chan3_id], claim_type='stream')
        await match([claim6, claim5, claim4, claim1], not_channel_ids=[chan1_id], claim_type='stream')
        await match([claim6, claim3, claim2, claim1], not_channel_ids=[chan2_id], claim_type='stream')
        await match([claim6, claim1], not_channel_ids=[chan1_id, chan2_id], claim_type='stream')
        await match([claim6, claim1, chan4], not_channel_ids=[chan1_id, chan2_id])

        # not_channel_ids + valid_channel_signature
        await match([claim5, claim4, claim3, claim2, claim1],
                    not_channel_ids=['abc123'], valid_channel_signature=True, claim_type='stream')
        await match([claim5, claim4, claim1],
                    not_channel_ids=[chan1_id], valid_channel_signature=True, claim_type='stream')
        await match([claim3, claim2, claim1],
                    not_channel_ids=[chan2_id], valid_channel_signature=True, claim_type='stream')
        await match([claim1], not_channel_ids=[chan1_id, chan2_id], valid_channel_signature=True, claim_type='stream')

        # not_channel_ids + has_channel_signature
        await match([claim6, claim5, claim4, claim3, claim2], not_channel_ids=['abc123'], has_channel_signature=True)
        await match([claim6, claim5, claim4], not_channel_ids=[chan1_id], has_channel_signature=True)
        await match([claim6, claim3, claim2], not_channel_ids=[chan2_id], has_channel_signature=True)
        await match([claim6], not_channel_ids=[chan1_id, chan2_id], has_channel_signature=True)

        # not_channel_ids + has_channel_signature + valid_channel_signature
        await match([claim5, claim4, claim3, claim2],
                    not_channel_ids=['abc123'], has_channel_signature=True, valid_channel_signature=True)
        await match([claim5, claim4],
                    not_channel_ids=[chan1_id], has_channel_signature=True, valid_channel_signature=True)
        await match([claim3, claim2],
                    not_channel_ids=[chan2_id], has_channel_signature=True, valid_channel_signature=True)
        await match([], not_channel_ids=[chan1_id, chan2_id], has_channel_signature=True, valid_channel_signature=True)
|
|
|
|
|
2020-08-19 19:59:26 +02:00
|
|
|
    async def test_limit_claims_per_channel(self):
        """limit_claims_per_channel caps how many results one channel contributes."""
        match = self.assertFindsClaims
        chan1_id = self.get_claim_id(await self.channel_create('@chan1'))
        chan2_id = self.get_claim_id(await self.channel_create('@chan2'))
        claim1 = await self.stream_create('claim1')
        claim2 = await self.stream_create('claim2', channel_id=chan1_id)
        claim3 = await self.stream_create('claim3', channel_id=chan1_id)
        claim4 = await self.stream_create('claim4', channel_id=chan1_id)
        claim5 = await self.stream_create('claim5', channel_id=chan2_id)
        claim6 = await self.stream_create('claim6', channel_id=chan2_id)
        # chan1 has three claims; with limit 2 the oldest (claim2) is dropped.
        await match(
            [claim6, claim5, claim4, claim3, claim1],
            limit_claims_per_channel=2, claim_type='stream'
        )
        # Limit 3 fits everything.
        await match(
            [claim6, claim5, claim4, claim3, claim2, claim1],
            limit_claims_per_channel=3, claim_type='stream'
        )
|
|
|
|
|
2021-05-19 08:05:51 +02:00
|
|
|
    async def test_no_duplicates(self):
        """remove_duplicates collapses reposts to a single representative claim."""
        await self.generate(10)
        match = self.assertFindsClaims
        claims = []
        channels = []
        first = await self.stream_create('original_claim0')
        second = await self.stream_create('original_claim1')
        # Ten channels, each reposting one of the two originals (alternating).
        for i in range(10):
            repost_id = self.get_claim_id(second if i % 2 == 0 else first)
            channel = await self.channel_create(f'@chan{i}', bid='0.001')
            channels.append(channel)
            claims.append(
                await self.stream_repost(repost_id, f'claim{i}', bid='0.001', channel_id=self.get_claim_id(channel)))
        # With the originals in range, reposts dedupe to the originals.
        await match([first, second] + channels,
                    remove_duplicates=True, order_by=['^height'])
        await match(list(reversed(channels)) + [second, first],
                    remove_duplicates=True, order_by=['height'])
        # the original claims doesn't show up, so we pick the oldest reposts
        await match([channels[0], claims[0], channels[1], claims[1]] + channels[2:],
                    height='>218',
                    remove_duplicates=True, order_by=['^height'])
        # limit claims per channel, invert order, oldest ones are still chosen
        await match(channels[2:][::-1] + [claims[1], channels[1], claims[0], channels[0]],
                    height='>218', limit_claims_per_channel=1,
                    remove_duplicates=True, order_by=['height'])
|
2021-05-19 08:05:51 +02:00
|
|
|
|
2021-04-14 17:16:49 +02:00
|
|
|
    async def test_limit_claims_per_channel_across_sorted_pages(self):
        """limit_claims_per_channel interacts with pagination: dropped claims
        re-enter on later pages via a replacement queue, and the limit applies
        per page rather than globally."""
        await self.generate(10)
        match = self.assertFindsClaims
        channel_id = self.get_claim_id(await self.channel_create('@chan0'))
        claims = []
        first = await self.stream_create('claim0', channel_id=channel_id)
        second = await self.stream_create('claim1', channel_id=channel_id)
        # Eight more claims, each in its own channel.
        for i in range(2, 10):
            some_chan = self.get_claim_id(await self.channel_create(f'@chan{i}', bid='0.001'))
            claims.append(await self.stream_create(f'claim{i}', bid='0.001', channel_id=some_chan))
        last = await self.stream_create('claim10', channel_id=channel_id)

        await match(
            [first, second, claims[0], claims[1]], page_size=4,
            limit_claims_per_channel=3, claim_type='stream', order_by=['^height']
        )
        # second goes out
        await match(
            [first, claims[0], claims[1], claims[2]], page_size=4,
            limit_claims_per_channel=1, claim_type='stream', order_by=['^height']
        )
        # second appears, from replacement queue
        await match(
            [second, claims[3], claims[4], claims[5]], page_size=4, page=2,
            limit_claims_per_channel=1, claim_type='stream', order_by=['^height']
        )
        # last is unaffected, as the limit applies per page
        await match(
            [claims[6], claims[7], last], page_size=4, page=3,
            limit_claims_per_channel=1, claim_type='stream', order_by=['^height']
        )
        # feature disabled on 0 or negative values
        for limit in [None, 0, -1]:
            await match(
                [first, second] + claims + [last],
                limit_claims_per_channel=limit, claim_type='stream', order_by=['^height']
            )
|
2021-04-14 17:16:49 +02:00
|
|
|
|
2019-06-04 06:10:59 +02:00
|
|
|
    async def test_claim_type_and_media_type_search(self):
        """
        Exercise claim_search filtering by claim_type, stream_types,
        media_types and duration, including a hand-built claim with an
        unknown/invalid metadata payload.
        """
        # create an invalid/unknown claim (raw metadata bytes, bypassing the daemon APIs)
        address = await self.account.receiving.get_or_create_usable_address()
        tx = await Transaction.claim_create(
            'unknown', b'{"sources":{"lbry_sd_hash":""}}', 1, address, [self.account], self.account)
        await tx.sign([self.account])
        await self.broadcast_and_confirm(tx)

        # one claim of each interesting stream/media type
        octet = await self.stream_create()
        video = await self.stream_create('chrome', file_path=self.video_file_name)
        image = await self.stream_create('blank-image', data=self.image_data, suffix='.png')
        image_repost = await self.stream_repost(self.get_claim_id(image), 'image-repost')
        video_repost = await self.stream_repost(self.get_claim_id(video), 'video-repost')
        collection = await self.collection_create('a-collection', claims=[self.get_claim_id(video)])
        channel = await self.channel_create()
        unknown = self.sout(tx)

        # claim_type
        await self.assertFindsClaims([image, video, octet, unknown], claim_type='stream')
        await self.assertFindsClaims([channel], claim_type='channel')
        await self.assertFindsClaims([video_repost, image_repost], claim_type='repost')
        await self.assertFindsClaims([collection], claim_type='collection')

        # stream_type (the unknown claim is classified as binary)
        await self.assertFindsClaims([octet, unknown], stream_types=['binary'])
        await self.assertFindsClaims([video_repost, video], stream_types=['video'])
        await self.assertFindsClaims([image_repost, image], stream_types=['image'])
        await self.assertFindsClaims([video_repost, image_repost, image, video], stream_types=['video', 'image'])

        # media_type
        await self.assertFindsClaims([octet, unknown], media_types=['application/octet-stream'])
        await self.assertFindsClaims([video_repost, video], media_types=['video/mp4'])
        await self.assertFindsClaims([image_repost, image], media_types=['image/png'])
        await self.assertFindsClaims([video_repost, image_repost, image, video], media_types=['video/mp4', 'image/png'])

        # duration — the test video is 15 seconds; assumes the fixture stays that
        # length (reposts inherit the reposted stream's duration)
        await self.assertFindsClaims([video_repost, video], duration='>14')
        await self.assertFindsClaims([video_repost, video], duration='<16')
        await self.assertFindsClaims([video_repost, video], duration=15)
        await self.assertFindsClaims([], duration='>100')
        await self.assertFindsClaims([], duration='<14')
|
2020-01-29 21:41:40 +01:00
|
|
|
|
2019-11-14 20:31:49 +01:00
|
|
|
    async def test_search_by_text(self):
        """
        Full-text search over name, title, description, tags and channel name,
        including AND ('+') and OR ('|') operators, re-indexing after an
        update, and removal from the index after abandon.
        """
        chan1_id = self.get_claim_id(await self.channel_create('@SatoshiNakamoto'))
        chan2_id = self.get_claim_id(await self.channel_create('@Bitcoin'))
        chan3_id = self.get_claim_id(await self.channel_create('@IAmSatoshi'))

        claim1 = await self.stream_create(
            "the-real-satoshi", title="The Real Satoshi Nakamoto",
            description="Documentary about the real Satoshi Nakamoto, creator of bitcoin.",
            tags=['satoshi nakamoto', 'bitcoin', 'documentary']
        )
        claim2 = await self.stream_create(
            "about-me", channel_id=chan1_id, title="Satoshi Nakamoto Autobiography",
            description="I am Satoshi Nakamoto and this is my autobiography.",
            tags=['satoshi nakamoto', 'bitcoin', 'documentary', 'autobiography']
        )
        claim3 = await self.stream_create(
            "history-of-bitcoin", channel_id=chan2_id, title="History of Bitcoin",
            description="History of bitcoin and its creator Satoshi Nakamoto.",
            tags=['satoshi nakamoto', 'bitcoin', 'documentary', 'history']
        )
        claim4 = await self.stream_create(
            "satoshi-conspiracies", channel_id=chan3_id, title="Satoshi Nakamoto Conspiracies",
            description="Documentary detailing various conspiracies surrounding Satoshi Nakamoto.",
            tags=['conspiracies', 'bitcoin', 'satoshi nakamoto']
        )

        # no match at all
        await self.assertFindsClaims([], text='cheese')
        # single-term matches (search appears to apply stemming:
        # 'conspiracy' matches the 'conspiracies' tag/description)
        await self.assertFindsClaims([claim2], text='autobiography')
        await self.assertFindsClaims([claim3], text='history')
        await self.assertFindsClaims([claim4], text='conspiracy')
        # '+' requires all terms; '|' accepts any
        await self.assertFindsClaims([], text='conspiracy+history')
        await self.assertFindsClaims([claim4, claim3], text='conspiracy|history')
        # order_by=[] leaves ordering to relevance scoring
        await self.assertFindsClaims([claim1, claim4, claim2, claim3], text='documentary', order_by=[])
        # todo: check why claim1 and claim2 order changed. used to be ...claim1, claim2...
        await self.assertFindsClaims([claim4, claim2, claim1, claim3], text='satoshi', order_by=[])

        # updating a claim re-indexes it: old terms stop matching, new ones match
        claim2 = await self.stream_update(
            self.get_claim_id(claim2), clear_tags=True, tags=['cloud'],
            title="Satoshi Nakamoto Nography",
            description="I am Satoshi Nakamoto and this is my nography.",
        )
        await self.assertFindsClaims([], text='autobiography')
        await self.assertFindsClaims([claim2], text='cloud')

        # abandoned claims drop out of the search index
        await self.stream_abandon(self.get_claim_id(claim2))
        await self.assertFindsClaims([], text='cloud')
|
|
|
|
|
2019-05-06 04:25:43 +02:00
|
|
|
|
2020-03-07 06:34:47 +01:00
|
|
|
class TransactionCommands(ClaimTestCase):
    """Tests for the `transaction_list` daemon command."""

    async def test_transaction_list(self):
        """
        After a create/update/abandon cycle for a channel and a stream,
        transaction_list returns all 7 transactions (6 operations plus the
        initial funding tx) in newest-first order, each annotated with the
        matching claim/update/abandon info.
        """
        channel_id = self.get_claim_id(await self.channel_create())
        await self.channel_update(channel_id, bid='0.5')
        await self.channel_abandon(claim_id=channel_id)
        stream_id = self.get_claim_id(await self.stream_create())
        await self.stream_update(stream_id, bid='0.5')
        await self.stream_abandon(claim_id=stream_id)

        r = await self.transaction_list()
        # 6 claim operations + the wallet's initial funding transaction
        self.assertEqual(7, len(r))
        # newest first: stream abandon/update/create, then channel abandon/update/create
        self.assertEqual(stream_id, r[0]['abandon_info'][0]['claim_id'])
        self.assertEqual(stream_id, r[1]['update_info'][0]['claim_id'])
        self.assertEqual(stream_id, r[2]['claim_info'][0]['claim_id'])
        self.assertEqual(channel_id, r[3]['abandon_info'][0]['claim_id'])
        self.assertEqual(channel_id, r[4]['update_info'][0]['claim_id'])
        self.assertEqual(channel_id, r[5]['claim_info'][0]['claim_id'])
|
|
|
|
|
|
|
|
|
|
|
|
class TransactionOutputCommands(ClaimTestCase):
|
|
|
|
|
2021-06-01 01:50:15 +02:00
|
|
|
    async def test_support_with_comment(self):
        """
        Supports can carry a `comment`, both when signed by a channel and when
        anonymous; the comment survives into txo_list output.
        """
        channel = self.get_claim_id(await self.channel_create('@identity'))
        stream = self.get_claim_id(await self.stream_create())
        # channel-signed support with a comment
        support = await self.support_create(stream, channel_id=channel, comment="nice!")
        self.assertEqual(support['outputs'][0]['value']['comment'], "nice!")
        r, = await self.txo_list(type='support')
        self.assertEqual(r['txid'], support['txid'])
        self.assertEqual(r['value']['comment'], "nice!")
        # remove the first support (blocking so the spend is visible below)
        await self.support_abandon(txid=support['txid'], nout=0, blocking=True)
        # anonymous (unsigned) support with a comment
        support = await self.support_create(stream, comment="anonymously great!")
        self.assertEqual(support['outputs'][0]['value']['comment'], "anonymously great!")
        r, = await self.txo_list(type='support', is_not_spent=True)
        self.assertEqual(r['txid'], support['txid'])
        self.assertEqual(r['value']['comment'], "anonymously great!")
|
2021-06-01 01:50:15 +02:00
|
|
|
|
2021-01-26 18:21:24 +01:00
|
|
|
    async def test_txo_list_resolve_supports(self):
        """
        txo_list only includes full signing-channel details (e.g. 'name') for
        supports when called with resolve=True.
        """
        channel = self.get_claim_id(await self.channel_create('@identity'))
        stream = self.get_claim_id(await self.stream_create())
        support = await self.support_create(stream, channel_id=channel)
        # without resolve: signing_channel is present but not resolved to a name
        r, = await self.txo_list(type='support')
        self.assertEqual(r['txid'], support['txid'])
        self.assertNotIn('name', r['signing_channel'])
        # with resolve: the channel name is filled in
        r, = await self.txo_list(type='support', resolve=True)
        self.assertIn('name', r['signing_channel'])
        self.assertEqual(r['signing_channel']['name'], '@identity')
|
|
|
|
|
2021-01-21 18:54:42 +01:00
|
|
|
    async def test_txo_list_by_channel_filtering(self):
        """
        txo_list supports filtering by signing channel: single channel_id,
        a list of channel_ids, and exclusion via not_channel_id.
        """
        channel_foo = self.get_claim_id(await self.channel_create('@foo'))
        channel_bar = self.get_claim_id(await self.channel_create('@bar'))
        # a, b signed by @foo; c signed by @bar; d unsigned
        stream_a = self.get_claim_id(await self.stream_create('a', channel_id=channel_foo))
        stream_b = self.get_claim_id(await self.stream_create('b', channel_id=channel_foo))
        stream_c = self.get_claim_id(await self.stream_create('c', channel_id=channel_bar))
        stream_d = self.get_claim_id(await self.stream_create('d'))
        # supports signed by the opposite channel from the stream they support
        support_c = await self.support_create(stream_c, '0.3', channel_id=channel_foo)
        support_d = await self.support_create(stream_d, '0.3', channel_id=channel_bar)

        # no channel filter: all streams
        r = await self.txo_list(type='stream')
        self.assertEqual({stream_a, stream_b, stream_c, stream_d}, {c['claim_id'] for c in r})

        # single channel filter
        r = await self.txo_list(type='stream', channel_id=channel_foo)
        self.assertEqual({stream_a, stream_b}, {c['claim_id'] for c in r})

        # channel filter applies to the support's signing channel, not the
        # supported stream's channel
        r = await self.txo_list(type='support', channel_id=channel_foo)
        self.assertEqual({support_c['txid']}, {s['txid'] for s in r})
        r = await self.txo_list(type='support', channel_id=channel_bar)
        self.assertEqual({support_d['txid']}, {s['txid'] for s in r})

        # list of channels (unsigned stream_d is excluded)
        r = await self.txo_list(type='stream', channel_id=[channel_foo, channel_bar])
        self.assertEqual({stream_a, stream_b, stream_c}, {c['claim_id'] for c in r})

        # exclusion: not_channel_id keeps unsigned claims
        r = await self.txo_list(type='stream', not_channel_id=channel_foo)
        self.assertEqual({stream_c, stream_d}, {c['claim_id'] for c in r})

        r = await self.txo_list(type='stream', not_channel_id=[channel_foo, channel_bar])
        self.assertEqual({stream_d}, {c['claim_id'] for c in r})
|
|
|
|
|
2020-03-21 00:07:16 +01:00
|
|
|
    async def test_txo_list_and_sum_filtering(self):
        """
        txo_sum and txo_list filtering by type, claim_id and name, with and
        without the is_not_spent flag; updates and abandons spend the old
        txo while (for updates) creating a new one, so totals accumulate
        while the unspent sum tracks only the latest output.
        """
        channel_id = self.get_claim_id(await self.channel_create())
        self.assertEqual('1.0', lbc(await self.txo_sum(type='channel', is_not_spent=True)))
        await self.channel_update(channel_id, bid='0.5')
        # unspent sum reflects only the post-update output...
        self.assertEqual('0.5', lbc(await self.txo_sum(type='channel', is_not_spent=True)))
        # ...while the unfiltered sum includes the spent 1.0 original
        self.assertEqual('1.5', lbc(await self.txo_sum(type='channel')))

        stream_id = self.get_claim_id(await self.stream_create(bid='1.3'))
        self.assertEqual('1.3', lbc(await self.txo_sum(type='stream', is_not_spent=True)))
        await self.stream_update(stream_id, bid='0.7')
        self.assertEqual('0.7', lbc(await self.txo_sum(type='stream', is_not_spent=True)))
        self.assertEqual('2.0', lbc(await self.txo_sum(type='stream')))

        # type may be a list: 0.5 + 0.7 unspent; 1.5 + 2.0 total
        self.assertEqual('1.2', lbc(await self.txo_sum(type=['stream', 'channel'], is_not_spent=True)))
        self.assertEqual('3.5', lbc(await self.txo_sum(type=['stream', 'channel'])))

        # type filtering — two txos per claim (original + update), newest first
        r = await self.txo_list(type='channel')
        self.assertEqual(2, len(r))
        self.assertEqual('channel', r[0]['value_type'])
        self.assertFalse(r[0]['is_spent'])
        self.assertEqual('channel', r[1]['value_type'])
        self.assertTrue(r[1]['is_spent'])

        r = await self.txo_list(type='stream')
        self.assertEqual(2, len(r))
        self.assertEqual('stream', r[0]['value_type'])
        self.assertFalse(r[0]['is_spent'])
        self.assertEqual('stream', r[1]['value_type'])
        self.assertTrue(r[1]['is_spent'])

        r = await self.txo_list(type=['stream', 'channel'])
        self.assertEqual(4, len(r))
        self.assertEqual({'stream', 'channel'}, {c['value_type'] for c in r})

        # claim_id filtering
        r = await self.txo_list(claim_id=stream_id)
        self.assertEqual(2, len(r))
        self.assertEqual({stream_id}, {c['claim_id'] for c in r})

        r = await self.txo_list(claim_id=[stream_id, channel_id])
        self.assertEqual(4, len(r))
        self.assertEqual({stream_id, channel_id}, {c['claim_id'] for c in r})
        # grab names for the name-filter checks below (original + update share a name)
        stream_name, _, channel_name, _ = (c['name'] for c in r)

        r = await self.txo_list(claim_id=['beef'])
        self.assertEqual(0, len(r))

        # claim_name filtering
        r = await self.txo_list(name=stream_name)
        self.assertEqual(2, len(r))
        self.assertEqual({stream_id}, {c['claim_id'] for c in r})

        r = await self.txo_list(name=[stream_name, channel_name])
        self.assertEqual(4, len(r))
        self.assertEqual({stream_id, channel_id}, {c['claim_id'] for c in r})

        r = await self.txo_list(name=['beef'])
        self.assertEqual(0, len(r))

        # unfiltered list includes payment/change txos too
        r = await self.txo_list()
        self.assertEqual(9, len(r))
        # abandoning the stream spends its txo and creates a new change txo
        await self.stream_abandon(claim_id=stream_id)
        r = await self.txo_list()
        self.assertEqual(10, len(r))
        r = await self.txo_list(claim_id=stream_id)
        self.assertEqual(2, len(r))
        self.assertTrue(r[0]['is_spent'])
        self.assertTrue(r[1]['is_spent'])
|
|
|
|
|
2020-03-20 06:11:05 +01:00
|
|
|
    async def test_txo_list_my_input_output_filtering(self):
        """
        txo_list flags and filters on ownership of inputs/outputs:
        is_my_input / is_my_output / is_my_input_or_output and the derived
        is_internal_transfer ("change": my input AND my output on a
        non-claim txo), plus exclude_internal_transfers.
        """
        wallet2 = await self.daemon.jsonrpc_wallet_create('wallet2', create_account=True)
        address2 = await self.daemon.jsonrpc_address_unused(wallet_id=wallet2.id)
        # one channel kept by wallet1, one created directly at wallet2's address,
        # plus a plain payment to wallet2
        await self.channel_create('@kept-channel')
        await self.channel_create('@sent-channel', claim_address=address2)
        await self.wallet_send('2.9', address2)

        # all txos on second wallet
        received_payment, received_channel = await self.txo_list(
            wallet_id=wallet2.id, is_my_input_or_output=True)
        # from wallet2's perspective both txos are received, not internal
        self.assertEqual('1.0', received_channel['amount'])
        self.assertFalse(received_channel['is_my_input'])
        self.assertTrue(received_channel['is_my_output'])
        self.assertFalse(received_channel['is_internal_transfer'])
        self.assertEqual('2.9', received_payment['amount'])
        self.assertFalse(received_payment['is_my_input'])
        self.assertTrue(received_payment['is_my_output'])
        self.assertFalse(received_payment['is_internal_transfer'])

        # all txos on default wallet — exact amounts pin the change outputs
        # (fee-dependent; newest first)
        r = await self.txo_list(is_my_input_or_output=True)
        self.assertEqual(
            ['2.9', '5.047662', '1.0', '7.947786', '1.0', '8.973893', '10.0'],
            [t['amount'] for t in r]
        )

        sent_payment, change3, sent_channel, change2, kept_channel, change1, initial_funds = r

        # the 2.9 payment: funded by me, received by wallet2
        self.assertTrue(sent_payment['is_my_input'])
        self.assertFalse(sent_payment['is_my_output'])
        self.assertFalse(sent_payment['is_internal_transfer'])
        self.assertTrue(change3['is_my_input'])
        self.assertTrue(change3['is_my_output'])
        self.assertTrue(change3['is_internal_transfer'])

        # @sent-channel: funded by me, claim output owned by wallet2
        self.assertTrue(sent_channel['is_my_input'])
        self.assertFalse(sent_channel['is_my_output'])
        self.assertFalse(sent_channel['is_internal_transfer'])
        self.assertTrue(change2['is_my_input'])
        self.assertTrue(change2['is_my_output'])
        self.assertTrue(change2['is_internal_transfer'])

        # @kept-channel is mine on both sides, but a claim output is not
        # considered an internal transfer
        self.assertTrue(kept_channel['is_my_input'])
        self.assertTrue(kept_channel['is_my_output'])
        self.assertFalse(kept_channel['is_internal_transfer'])
        self.assertTrue(change1['is_my_input'])
        self.assertTrue(change1['is_my_output'])
        self.assertTrue(change1['is_internal_transfer'])

        # the initial funding came from outside the wallet
        self.assertFalse(initial_funds['is_my_input'])
        self.assertTrue(initial_funds['is_my_output'])
        self.assertFalse(initial_funds['is_internal_transfer'])

        # my stuff and stuff i sent excluding "change"
        r = await self.txo_list(is_my_input_or_output=True, exclude_internal_transfers=True)
        self.assertEqual([sent_payment, sent_channel, kept_channel, initial_funds], r)

        # my unspent stuff and stuff i sent excluding "change"
        r = await self.txo_list(is_my_input_or_output=True, is_not_spent=True, exclude_internal_transfers=True)
        self.assertEqual([sent_payment, sent_channel, kept_channel], r)

        # only "change"
        r = await self.txo_list(is_my_input=True, is_my_output=True, type="other")
        self.assertEqual([change3, change2, change1], r)

        # only unspent "change"
        r = await self.txo_list(is_my_input=True, is_my_output=True, type="other", is_not_spent=True)
        self.assertEqual([change3], r)

        # only spent "change"
        r = await self.txo_list(is_my_input=True, is_my_output=True, type="other", is_spent=True)
        self.assertEqual([change2, change1], r)

        # all my unspent stuff
        r = await self.txo_list(is_my_output=True, is_not_spent=True)
        self.assertEqual([change3, kept_channel], r)

        # stuff i sent
        r = await self.txo_list(is_not_my_output=True)
        self.assertEqual([sent_payment, sent_channel], r)
|
2020-03-09 04:11:03 +01:00
|
|
|
|
2020-03-26 05:37:13 +01:00
|
|
|
    async def test_txo_plot(self):
        """
        txo_plot buckets txo totals by day; exercise the default window
        (today only), days_back, start_day/end_day and start_day/days_after.
        The literal dates assume the regtest chain's deterministic genesis
        timestamp, which places the test days on 2016-06-23..25.
        """
        # number of blocks whose expected timestamps span roughly one day
        day_blocks = int((24 * 60 * 60) / self.ledger.headers.timestamp_average_offset)
        stream_id = self.get_claim_id(await self.stream_create())
        # day 1 (2016-06-23): 0.3 + 0.2 = 0.5 in supports
        await self.support_create(stream_id, '0.3')
        await self.support_create(stream_id, '0.2')
        await self.generate(day_blocks // 2)
        await self.stream_update(stream_id)
        await self.generate(day_blocks // 2)
        # day 2 (2016-06-24): 0.4 + 0.5 = 0.9
        await self.support_create(stream_id, '0.4')
        await self.support_create(stream_id, '0.5')
        await self.stream_update(stream_id)
        await self.generate(day_blocks // 2)
        await self.stream_update(stream_id)
        await self.generate(day_blocks // 2)
        # day 3 (2016-06-25): 0.6
        await self.support_create(stream_id, '0.6')

        # default: only the current day
        plot = await self.txo_plot(type='support')
        self.assertEqual([
            {'day': '2016-06-25', 'total': '0.6'},
        ], plot)
        plot = await self.txo_plot(type='support', days_back=1)
        self.assertEqual([
            {'day': '2016-06-24', 'total': '0.9'},
            {'day': '2016-06-25', 'total': '0.6'},
        ], plot)
        plot = await self.txo_plot(type='support', days_back=2)
        self.assertEqual([
            {'day': '2016-06-23', 'total': '0.5'},
            {'day': '2016-06-24', 'total': '0.9'},
            {'day': '2016-06-25', 'total': '0.6'},
        ], plot)

        # explicit start_day runs to the current day
        plot = await self.txo_plot(type='support', start_day='2016-06-23')
        self.assertEqual([
            {'day': '2016-06-23', 'total': '0.5'},
            {'day': '2016-06-24', 'total': '0.9'},
            {'day': '2016-06-25', 'total': '0.6'},
        ], plot)
        plot = await self.txo_plot(type='support', start_day='2016-06-24')
        self.assertEqual([
            {'day': '2016-06-24', 'total': '0.9'},
            {'day': '2016-06-25', 'total': '0.6'},
        ], plot)
        # end_day bounds the window (inclusive)
        plot = await self.txo_plot(type='support', start_day='2016-06-23', end_day='2016-06-24')
        self.assertEqual([
            {'day': '2016-06-23', 'total': '0.5'},
            {'day': '2016-06-24', 'total': '0.9'},
        ], plot)
        # days_after extends forward from start_day (inclusive)
        plot = await self.txo_plot(type='support', start_day='2016-06-23', days_after=1)
        self.assertEqual([
            {'day': '2016-06-23', 'total': '0.5'},
            {'day': '2016-06-24', 'total': '0.9'},
        ], plot)
        plot = await self.txo_plot(type='support', start_day='2016-06-23', days_after=2)
        self.assertEqual([
            {'day': '2016-06-23', 'total': '0.5'},
            {'day': '2016-06-24', 'total': '0.9'},
            {'day': '2016-06-25', 'total': '0.6'},
        ], plot)
|
|
|
|
|
2020-03-27 03:16:05 +01:00
|
|
|
    async def test_txo_spend(self):
        """
        txo_spend sweeps matching txos back into the wallet in batches of
        `batch_size` inputs per transaction; include_full_tx controls whether
        full transactions or just txids are returned.
        """
        stream_id = self.get_claim_id(await self.stream_create())
        # ten 0.1 supports to sweep
        for _ in range(10):
            await self.support_create(stream_id, '0.1')
        # exact balance after fees (fee-dependent constant)
        await self.assertBalance(self.account, '7.978478')
        self.assertEqual('1.0', lbc(await self.txo_sum(type='support', is_not_spent=True)))
        txs = await self.txo_spend(type='support', batch_size=3, include_full_tx=True)
        # 10 supports / batch of 3 -> 3 full batches + 1 remainder tx
        self.assertEqual(4, len(txs))
        self.assertEqual(3, len(txs[0]['inputs']))
        self.assertEqual(3, len(txs[1]['inputs']))
        self.assertEqual(3, len(txs[2]['inputs']))
        self.assertEqual(1, len(txs[3]['inputs']))
        self.assertEqual('0.0', lbc(await self.txo_sum(type='support', is_not_spent=True)))
        # the swept 1.0 returns to the balance, minus sweep fees
        await self.assertBalance(self.account, '8.977606')

        # without include_full_tx only the txid is returned per transaction
        await self.support_create(stream_id, '0.1')
        txs = await self.daemon.jsonrpc_txo_spend(type='support', batch_size=3)
        self.assertEqual(1, len(txs))
        self.assertEqual({'txid'}, set(txs[0]))
|
|
|
|
|
2020-03-07 06:34:47 +01:00
|
|
|
|
2020-02-01 23:59:10 +01:00
|
|
|
class ClaimCommands(ClaimTestCase):
|
|
|
|
|
2020-03-07 06:34:47 +01:00
|
|
|
    async def test_claim_list_filtering(self):
        """
        claim_list filtering by claim_type, claim_id and name, each accepting
        a single value or a list; unlike txo_list, spent (pre-update) txos are
        not included, so each claim appears once.
        """
        channel_id = self.get_claim_id(await self.channel_create())
        stream_id = self.get_claim_id(await self.stream_create())

        await self.stream_update(stream_id, title='foo')

        # type filtering
        r = await self.claim_list(claim_type='channel')
        self.assertEqual(1, len(r))
        self.assertEqual('channel', r[0]['value_type'])

        # catch a bug where cli sends is_spent=False by default
        r = await self.claim_list(claim_type='stream', is_spent=False)
        self.assertEqual(1, len(r))
        self.assertEqual('stream', r[0]['value_type'])

        r = await self.claim_list(claim_type=['stream', 'channel'])
        self.assertEqual(2, len(r))
        self.assertEqual({'stream', 'channel'}, {c['value_type'] for c in r})

        # claim_id filtering
        r = await self.claim_list(claim_id=stream_id)
        self.assertEqual(1, len(r))
        self.assertEqual({stream_id}, {c['claim_id'] for c in r})

        r = await self.claim_list(claim_id=[stream_id, channel_id])
        self.assertEqual(2, len(r))
        self.assertEqual({stream_id, channel_id}, {c['claim_id'] for c in r})
        # capture names for the name-filter checks below
        stream_name, channel_name = (c['name'] for c in r)

        r = await self.claim_list(claim_id=['beef'])
        self.assertEqual(0, len(r))

        # claim_name filtering
        r = await self.claim_list(name=stream_name)
        self.assertEqual(1, len(r))
        self.assertEqual({stream_id}, {c['claim_id'] for c in r})

        r = await self.claim_list(name=[stream_name, channel_name])
        self.assertEqual(2, len(r))
        self.assertEqual({stream_id, channel_id}, {c['claim_id'] for c in r})

        r = await self.claim_list(name=['beef'])
        self.assertEqual(0, len(r))
|
|
|
|
|
2020-02-01 23:59:10 +01:00
|
|
|
    async def test_claim_stream_channel_list_with_resolve(self):
        """
        claim_list / stream_list / channel_list with resolve=True attach
        resolved metadata ('short_url', 'meta'); unconfirmed claims resolve
        to a NOT_FOUND error until a block confirms them.
        """
        self.assertListEqual([], await self.claim_list(resolve=True))

        await self.channel_create()
        await self.stream_create()

        # without resolve, no resolved fields are present
        r = await self.claim_list()
        self.assertNotIn('short_url', r[0])
        self.assertNotIn('short_url', r[1])
        self.assertNotIn('short_url', (await self.stream_list())[0])
        self.assertNotIn('short_url', (await self.channel_list())[0])

        # with resolve, resolved fields appear
        r = await self.claim_list(resolve=True)
        self.assertIn('short_url', r[0])
        self.assertIn('short_url', r[1])
        self.assertIn('short_url', (await self.stream_list(resolve=True))[0])
        self.assertIn('short_url', (await self.channel_list(resolve=True))[0])

        # unconfirmed channel won't resolve (wait only until the wallet sees
        # the tx in the mempool, not until it is mined)
        channel_tx = await self.daemon.jsonrpc_channel_create('@foo', '1.0')
        await self.ledger.wait(channel_tx)

        r = await self.claim_list(resolve=True)
        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
        self.assertTrue(r[1]['meta']['is_controlling'])
        r = await self.channel_list(resolve=True)
        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
        self.assertTrue(r[1]['meta']['is_controlling'])

        # confirm it
        await self.generate(1)
        await self.ledger.wait(channel_tx, self.blockchain.block_expected)

        # all channel claims resolve
        r = await self.claim_list(resolve=True)
        self.assertTrue(r[0]['meta']['is_controlling'])
        self.assertTrue(r[1]['meta']['is_controlling'])
        r = await self.channel_list(resolve=True)
        self.assertTrue(r[0]['meta']['is_controlling'])
        self.assertTrue(r[1]['meta']['is_controlling'])

        # unconfirmed stream won't resolve
        stream_tx = await self.daemon.jsonrpc_stream_create(
            'foo', '1.0', file_path=self.create_upload_file(data=b'hi')
        )
        await self.ledger.wait(stream_tx)

        r = await self.claim_list(resolve=True)
        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
        self.assertTrue(r[1]['meta']['is_controlling'])
        r = await self.stream_list(resolve=True)
        self.assertEqual('NOT_FOUND', r[0]['meta']['error']['name'])
        self.assertTrue(r[1]['meta']['is_controlling'])

        # confirm it
        await self.generate(1)
        await self.ledger.wait(stream_tx, self.blockchain.block_expected)

        # all claims resolve
        r = await self.claim_list(resolve=True)
        self.assertTrue(r[0]['meta']['is_controlling'])
        self.assertTrue(r[1]['meta']['is_controlling'])
        self.assertTrue(r[2]['meta']['is_controlling'])
        self.assertTrue(r[3]['meta']['is_controlling'])
        r = await self.stream_list(resolve=True)
        self.assertTrue(r[0]['meta']['is_controlling'])
        self.assertTrue(r[1]['meta']['is_controlling'])
        r = await self.channel_list(resolve=True)
        self.assertTrue(r[0]['meta']['is_controlling'])
        self.assertTrue(r[1]['meta']['is_controlling'])

        # check that metadata is transferred
        self.assertTrue(r[0]['is_my_output'])
|
2020-02-29 15:42:11 +01:00
|
|
|
|
2020-03-21 04:19:26 +01:00
|
|
|
async def assertClaimList(self, claim_ids, **kwargs):
|
|
|
|
self.assertEqual(claim_ids, [c['claim_id'] for c in await self.claim_list(**kwargs)])
|
|
|
|
|
|
|
|
    async def test_list_streams_in_channel_and_order_by(self):
        """
        claim_list filtering by one or more channel_ids, and ordering by
        name, amount (bid) and height.
        """
        channel1_id = self.get_claim_id(await self.channel_create('@chan-one'))
        channel2_id = self.get_claim_id(await self.channel_create('@chan-two'))
        stream1_id = self.get_claim_id(await self.stream_create('stream-a', bid='0.3', channel_id=channel1_id))
        stream2_id = self.get_claim_id(await self.stream_create('stream-b', bid='0.9', channel_id=channel1_id))
        stream3_id = self.get_claim_id(await self.stream_create('stream-c', bid='0.6', channel_id=channel2_id))
        # default ordering within a channel filter is newest first
        await self.assertClaimList([stream2_id, stream1_id], channel_id=channel1_id)
        await self.assertClaimList([stream3_id], channel_id=channel2_id)
        await self.assertClaimList([stream3_id, stream2_id, stream1_id], channel_id=[channel1_id, channel2_id])
        # explicit orderings
        await self.assertClaimList([stream1_id, stream2_id, stream3_id], claim_type='stream', order_by='name')
        await self.assertClaimList([stream1_id, stream3_id, stream2_id], claim_type='stream', order_by='amount')
        await self.assertClaimList([stream3_id, stream2_id, stream1_id], claim_type='stream', order_by='height')
|
|
|
|
|
2020-03-22 22:22:15 +01:00
|
|
|
    async def test_claim_list_with_tips(self):
        """
        claim_list(include_received_tips=True) adds a 'received_tips' total
        per claim (tips from any wallet); abandoning supports zeroes it.
        Claims are listed newest first: claims[0] is 'two', claims[1] is 'one'.
        """
        wallet2 = await self.daemon.jsonrpc_wallet_create('wallet2', create_account=True)
        address2 = await self.daemon.jsonrpc_address_unused(wallet_id=wallet2.id)

        # fund the second wallet so it can send tips
        await self.wallet_send('5.0', address2)

        stream1_id = self.get_claim_id(await self.stream_create('one'))
        stream2_id = self.get_claim_id(await self.stream_create('two'))

        # field is absent unless explicitly requested
        claims = await self.claim_list()
        self.assertNotIn('received_tips', claims[0])
        self.assertNotIn('received_tips', claims[1])

        claims = await self.claim_list(include_received_tips=True)
        self.assertEqual('0.0', claims[0]['received_tips'])
        self.assertEqual('0.0', claims[1]['received_tips'])

        # stream1 receives 0.7 + 0.3 + 0.2 = 1.2; stream2 receives 0.4 + 0.5 + 0.1 = 1.0
        await self.support_create(stream1_id, '0.7', tip=True)
        await self.support_create(stream1_id, '0.3', tip=True, wallet_id=wallet2.id)
        await self.support_create(stream1_id, '0.2', tip=True, wallet_id=wallet2.id)
        await self.support_create(stream2_id, '0.4', tip=True, wallet_id=wallet2.id)
        await self.support_create(stream2_id, '0.5', tip=True, wallet_id=wallet2.id)
        await self.support_create(stream2_id, '0.1', tip=True, wallet_id=wallet2.id)

        claims = await self.claim_list(include_received_tips=True)
        self.assertEqual('1.0', claims[0]['received_tips'])
        self.assertEqual('1.2', claims[1]['received_tips'])

        # abandoning stream1's supports only affects stream1's total
        await self.support_abandon(stream1_id)
        claims = await self.claim_list(include_received_tips=True)
        self.assertEqual('1.0', claims[0]['received_tips'])
        self.assertEqual('0.0', claims[1]['received_tips'])

        await self.support_abandon(stream2_id)
        claims = await self.claim_list(include_received_tips=True)
        self.assertEqual('0.0', claims[0]['received_tips'])
        self.assertEqual('0.0', claims[1]['received_tips'])
|
|
|
|
|
2021-04-23 16:39:53 +02:00
|
|
|
async def stream_update_and_wait(self, claim_id, **kwargs):
|
|
|
|
tx = await self.daemon.jsonrpc_stream_update(claim_id, **kwargs)
|
|
|
|
await self.ledger.wait(tx)
|
|
|
|
|
|
|
|
    async def test_claim_list_pending_edits_ordering(self):
        """Claims with pending (just-updated) edits jump to the front of claim_list."""
        # Created oldest ('five') to newest ('one'); default ordering is newest first.
        stream5_id = self.get_claim_id(await self.stream_create('five'))
        stream4_id = self.get_claim_id(await self.stream_create('four'))
        stream3_id = self.get_claim_id(await self.stream_create('three'))
        stream2_id = self.get_claim_id(await self.stream_create('two'))
        stream1_id = self.get_claim_id(await self.stream_create('one'))
        await self.assertClaimList([stream1_id, stream2_id, stream3_id, stream4_id, stream5_id])
        # An edit moves the edited claim ahead of everything else.
        await self.stream_update_and_wait(stream4_id, title='foo')
        await self.assertClaimList([stream4_id, stream1_id, stream2_id, stream3_id, stream5_id])
        # A second edit slots in after the earlier pending edit.
        await self.stream_update_and_wait(stream3_id, title='foo')
        await self.assertClaimList([stream4_id, stream3_id, stream1_id, stream2_id, stream5_id])
|
|
|
|
|
2020-02-01 23:59:10 +01:00
|
|
|
|
2019-03-24 21:55:04 +01:00
|
|
|
class ChannelCommands(CommandTestCase):
    """Integration tests for the channel_* JSON-RPC commands.

    NOTE(review): the hard-coded balance strings (e.g. '8.991893') encode the exact
    fee schedule of the regtest setup; they will shift if fees or tx ordering change.
    """

    async def test_create_channel_names(self):
        """Channel name validation: '@' prefix required, duplicates rejected unless overridden."""
        # claim new name
        await self.channel_create('@foo')
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 1)
        await self.assertBalance(self.account, '8.991893')

        # fail to claim duplicate
        with self.assertRaisesRegex(Exception, "You already have a channel under the name '@foo'."):
            await self.channel_create('@foo')

        # fail to claim invalid name
        with self.assertRaisesRegex(Exception, "Channel names must start with '@' symbol."):
            await self.channel_create('foo')

        # nothing's changed after failed attempts
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 1)
        await self.assertBalance(self.account, '8.991893')

        # succeed overriding duplicate restriction
        await self.channel_create('@foo', allow_duplicate_name=True)
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 2)
        await self.assertBalance(self.account, '7.983786')

    async def test_channel_bids(self):
        """Bid handling: preserved on update, changeable, bounded by available funds."""
        # enough funds
        tx = await self.channel_create('@foo', '5.0')
        claim_id = self.get_claim_id(tx)
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 1)
        await self.assertBalance(self.account, '4.991893')

        # bid preserved on update
        tx = await self.channel_update(claim_id)
        self.assertEqual(tx['outputs'][0]['amount'], '5.0')

        # bid changed on update
        tx = await self.channel_update(claim_id, bid='4.0')
        self.assertEqual(tx['outputs'][0]['amount'], '4.0')

        # lowering the bid released 1.0 back to the account (minus fees)
        await self.assertBalance(self.account, '5.991503')

        # not enough funds
        with self.assertRaisesRegex(
                InsufficientFundsError, "Not enough funds to cover this transaction."):
            await self.channel_create('@foo2', '9.0')
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 1)
        await self.assertBalance(self.account, '5.991503')

        # spend exactly amount available, no change
        tx = await self.channel_create('@foo3', '5.981322')
        await self.assertBalance(self.account, '0.0')
        self.assertEqual(len(tx['outputs']), 1)  # no change
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 2)

    async def test_setting_channel_fields(self):
        """All channel metadata fields round-trip through create/update/replace."""
        values = {
            'title': "Cool Channel",
            'description': "Best channel on LBRY.",
            'thumbnail_url': "https://co.ol/thumbnail.png",
            'tags': ["cool", "awesome"],
            'languages': ["en-US"],
            'locations': ['US::Manchester'],
            'email': "human@email.com",
            'website_url': "https://co.ol",
            'cover_url': "https://co.ol/cover.png",
            'featured': ['cafe']
        }
        # `fixed_values` is what the API is expected to return: *_url inputs are
        # normalized to nested objects and location strings parsed into dicts.
        fixed_values = values.copy()
        fixed_values['thumbnail'] = {'url': fixed_values.pop('thumbnail_url')}
        fixed_values['locations'] = [{'country': 'US', 'city': 'Manchester'}]
        fixed_values['cover'] = {'url': fixed_values.pop('cover_url')}

        # create new channel with all fields set
        tx = await self.out(self.channel_create('@bigchannel', **values))
        channel = tx['outputs'][0]['value']
        self.assertEqual(channel, {
            'public_key': channel['public_key'],
            'public_key_id': channel['public_key_id'],
            **fixed_values
        })

        # create channel with nothing set
        tx = await self.out(self.channel_create('@lightchannel'))
        channel = tx['outputs'][0]['value']
        self.assertEqual(
            channel, {'public_key': channel['public_key'], 'public_key_id': channel['public_key_id']})

        # create channel with just a featured claim
        tx = await self.out(self.channel_create('@featurechannel', featured='beef'))
        txo = tx['outputs'][0]
        claim_id, channel = txo['claim_id'], txo['value']
        fixed_values['public_key'] = channel['public_key']
        fixed_values['public_key_id'] = channel['public_key_id']
        self.assertEqual(channel, {
            'public_key': fixed_values['public_key'],
            'public_key_id': fixed_values['public_key_id'],
            'featured': ['beef']
        })

        # update channel "@featurechannel" setting all fields
        tx = await self.out(self.channel_update(claim_id, **values))
        channel = tx['outputs'][0]['value']
        fixed_values['featured'].insert(0, 'beef')  # existing featured claim
        self.assertEqual(channel, fixed_values)

        # clearing and settings featured content
        tx = await self.out(self.channel_update(claim_id, featured='beefcafe', clear_featured=True))
        channel = tx['outputs'][0]['value']
        fixed_values['featured'] = ['beefcafe']
        self.assertEqual(channel, fixed_values)

        # reset signing key
        tx = await self.out(self.channel_update(claim_id, new_signing_key=True))
        channel = tx['outputs'][0]['value']
        self.assertNotEqual(channel['public_key'], fixed_values['public_key'])

        # replace mode (clears everything except public_key)
        tx = await self.out(self.channel_update(claim_id, replace=True, title='foo', email='new@email.com'))
        self.assertEqual(tx['outputs'][0]['value'], {
            'public_key': channel['public_key'],
            'public_key_id': channel['public_key_id'],
            'title': 'foo', 'email': 'new@email.com'}
        )

        # move channel to another account
        new_account = await self.out(self.daemon.jsonrpc_account_create('second account'))
        account2_id, account2 = new_account['id'], self.wallet.get_account_or_error(new_account['id'])

        # before moving
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 3)
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(account_id=account2_id), 0)

        other_address = await account2.receiving.get_or_create_usable_address()
        tx = await self.out(self.channel_update(claim_id, claim_address=other_address))

        # after moving
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(), 3)
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(account_id=self.account.id), 2)
        self.assertItemCount(await self.daemon.jsonrpc_channel_list(account_id=account2_id), 1)

    async def test_sign_hex_encoded_data(self):
        """channel_sign produces verifiable signatures whether looked up by name or id."""
        data_to_sign = "CAFEBABE"
        # claim new name
        await self.channel_create('@someotherchan')
        channel_tx = await self.daemon.jsonrpc_channel_create('@signer', '0.1', blocking=True)
        await self.confirm_tx(channel_tx.id)
        channel = channel_tx.outputs[0]
        # Same payload signed via channel_name and via channel_id must both verify.
        signature1 = await self.out(self.daemon.jsonrpc_channel_sign(channel_name='@signer', hexdata=data_to_sign))
        signature2 = await self.out(self.daemon.jsonrpc_channel_sign(channel_id=channel.claim_id, hexdata=data_to_sign))
        self.assertTrue(verify(channel, unhexlify(data_to_sign), signature1))
        self.assertTrue(verify(channel, unhexlify(data_to_sign), signature2))
        # Non-string hexdata (int 99) is accepted and treated as the hex string '99'.
        signature3 = await self.out(self.daemon.jsonrpc_channel_sign(channel_id=channel.claim_id, hexdata=99))
        self.assertTrue(verify(channel, unhexlify('99'), signature3))

    async def test_channel_export_import_before_sending_channel(self):
        """An imported channel key allows updates only after the claim output is sent over."""
        # export
        tx = await self.channel_create('@foo', '1.0')
        claim_id = self.get_claim_id(tx)
        channel_private_key = (await self.account.get_channels())[0].private_key
        exported_data = await self.out(self.daemon.jsonrpc_channel_export(claim_id))

        # import
        daemon2 = await self.add_daemon()
        self.assertItemCount(await daemon2.jsonrpc_channel_list(), 0)
        await daemon2.jsonrpc_channel_import(exported_data)
        channels = (await daemon2.jsonrpc_channel_list())['items']
        self.assertEqual(1, len(channels))
        self.assertEqual(channel_private_key.private_key_bytes, channels[0].private_key.private_key_bytes)

        # second wallet can't update until channel is sent to it
        with self.assertRaisesRegex(AssertionError, 'Cannot find private key for signing output.'):
            await daemon2.jsonrpc_channel_update(claim_id, bid='0.5')

        # now send the channel as well
        await self.channel_update(claim_id, claim_address=await daemon2.jsonrpc_address_unused())

        # second wallet should be able to update now
        await daemon2.jsonrpc_channel_update(claim_id, bid='0.5')

    async def test_channel_update_across_accounts(self):
        """channel_update with account_id is scoped; without it, all accounts are searched."""
        account2 = await self.daemon.jsonrpc_account_create('second account')
        channel = await self.out(self.channel_create('@spam', '1.0', account_id=account2.id))
        # channel not in account1
        with self.assertRaisesRegex(Exception, "Can't find the channel"):
            await self.channel_update(self.get_claim_id(channel), bid='2.0', account_id=self.account.id)
        # channel is in account2
        await self.channel_update(self.get_claim_id(channel), bid='2.0', account_id=account2.id)
        result = (await self.out(self.daemon.jsonrpc_channel_list()))['items']
        self.assertEqual(result[0]['amount'], '2.0')
        # check all accounts for channel
        await self.channel_update(self.get_claim_id(channel), bid='3.0')
        result = (await self.out(self.daemon.jsonrpc_channel_list()))['items']
        self.assertEqual(result[0]['amount'], '3.0')
        await self.channel_abandon(self.get_claim_id(channel))

    async def test_tag_normalization(self):
        """Tags are lowercased, stripped and deduplicated; updates merge unless cleared."""
        tx1 = await self.channel_create('@abc', '1.0', tags=['aBc', ' ABC ', 'xYZ ', 'xyz'])
        claim_id = self.get_claim_id(tx1)
        self.assertCountEqual(tx1['outputs'][0]['value']['tags'], ['abc', 'xyz'])

        # New tags are merged with existing ones on update.
        tx2 = await self.channel_update(claim_id, tags=[' pqr', 'PQr '])
        self.assertCountEqual(tx2['outputs'][0]['value']['tags'], ['abc', 'xyz', 'pqr'])

        # A bare string is accepted in place of a list.
        tx3 = await self.channel_update(claim_id, tags=' pqr')
        self.assertCountEqual(tx3['outputs'][0]['value']['tags'], ['abc', 'xyz', 'pqr'])

        # clear_tags drops the old set before applying the new one.
        tx4 = await self.channel_update(claim_id, tags=[' pqr', 'PQr '], clear_tags=True)
        self.assertEqual(tx4['outputs'][0]['value']['tags'], ['pqr'])
2019-10-24 22:48:00 +02:00
|
|
|
|
2019-06-04 06:10:59 +02:00
|
|
|
class StreamCommands(ClaimTestCase):
|
2019-04-21 05:54:34 +02:00
|
|
|
|
2019-03-26 03:06:36 +01:00
|
|
|
    async def test_create_stream_names(self):
        """Stream name validation: no '@' prefix, duplicates rejected unless overridden."""
        # claim new name
        await self.stream_create('foo')
        self.assertItemCount(await self.daemon.jsonrpc_claim_list(), 1)
        await self.assertBalance(self.account, '8.993893')

        # fail to claim duplicate
        with self.assertRaisesRegex(
                Exception, "You already have a stream claim published under the name 'foo'."):
            await self.stream_create('foo')

        # fail claim starting with @
        with self.assertRaisesRegex(
                Exception, "Stream names cannot start with '@' symbol."):
            await self.stream_create('@foo')

        # nothing changed after the failed attempts
        self.assertItemCount(await self.daemon.jsonrpc_claim_list(), 1)
        await self.assertBalance(self.account, '8.993893')

        # succeed overriding duplicate restriction
        await self.stream_create('foo', allow_duplicate_name=True)
        self.assertItemCount(await self.daemon.jsonrpc_claim_list(), 2)
        await self.assertBalance(self.account, '7.987786')
|
|
|
|
|
2019-03-26 03:06:36 +01:00
|
|
|
    async def test_stream_bids(self):
        """Stream bid handling: preserved on update, changeable, bounded by funds."""
        # enough funds
        tx = await self.stream_create('foo', '2.0')
        claim_id = self.get_claim_id(tx)
        self.assertItemCount(await self.daemon.jsonrpc_claim_list(), 1)
        await self.assertBalance(self.account, '7.993893')

        # bid preserved on update
        tx = await self.stream_update(claim_id)
        self.assertEqual(tx['outputs'][0]['amount'], '2.0')

        # bid changed on update
        tx = await self.stream_update(claim_id, bid='3.0')
        self.assertEqual(tx['outputs'][0]['amount'], '3.0')

        # raising the bid locked up another 1.0 (plus fees)
        await self.assertBalance(self.account, '6.993319')

        # not enough funds
        with self.assertRaisesRegex(
                InsufficientFundsError, "Not enough funds to cover this transaction."):
            await self.stream_create('foo2', '9.0')
        self.assertItemCount(await self.daemon.jsonrpc_claim_list(), 1)
        await self.assertBalance(self.account, '6.993319')

        # spend exactly amount available, no change
        tx = await self.stream_create('foo3', '6.98523')
        await self.assertBalance(self.account, '0.0')
        self.assertEqual(len(tx['outputs']), 1)  # no change
        self.assertItemCount(await self.daemon.jsonrpc_claim_list(), 2)
|
2019-03-24 21:55:04 +01:00
|
|
|
|
2019-10-04 05:14:09 +02:00
|
|
|
    async def test_stream_update_and_abandon_across_accounts(self):
        """stream_update with account_id is scoped; without it, all accounts are searched."""
        account2 = await self.daemon.jsonrpc_account_create('second account')
        stream = await self.out(self.stream_create('spam', '1.0', account_id=account2.id))
        # stream not in account1
        with self.assertRaisesRegex(Exception, "Can't find the stream"):
            await self.stream_update(self.get_claim_id(stream), bid='2.0', account_id=self.account.id)
        # stream is in account2
        await self.stream_update(self.get_claim_id(stream), bid='2.0', account_id=account2.id)
        result = (await self.out(self.daemon.jsonrpc_stream_list()))['items']
        self.assertEqual(result[0]['amount'], '2.0')
        # check all accounts for stream
        await self.stream_update(self.get_claim_id(stream), bid='3.0')
        result = (await self.out(self.daemon.jsonrpc_stream_list()))['items']
        self.assertEqual(result[0]['amount'], '3.0')
        await self.stream_abandon(self.get_claim_id(stream))
|
|
|
|
|
2019-03-24 21:55:04 +01:00
|
|
|
    async def test_publishing_checks_all_accounts_for_channel(self):
        """Publishing into a channel resolves the channel across all wallet accounts
        by default, and can be restricted with channel_account_id."""
        account1_id, account1 = self.account.id, self.account
        new_account = await self.out(self.daemon.jsonrpc_account_create('second account'))
        account2_id, account2 = new_account['id'], self.wallet.get_account_or_error(new_account['id'])

        await self.out(self.channel_create('@spam', '1.0'))
        self.assertEqual('8.989893', (await self.daemon.jsonrpc_account_balance())['available'])

        # Fund account2 so it can hold its own channel.
        result = await self.out(self.daemon.jsonrpc_account_send(
            '5.0', await self.daemon.jsonrpc_address_unused(account2_id), blocking=True
        ))
        await self.confirm_tx(result['txid'])

        self.assertEqual('3.989769', (await self.daemon.jsonrpc_account_balance())['available'])
        self.assertEqual('5.0', (await self.daemon.jsonrpc_account_balance(account2_id))['available'])

        baz_tx = await self.out(self.channel_create('@baz', '1.0', account_id=account2_id))
        baz_id = self.get_claim_id(baz_tx)

        # Per-account channel_list shows only that account's channels.
        channels = await self.out(self.daemon.jsonrpc_channel_list(account1_id))
        self.assertItemCount(channels, 1)
        self.assertEqual(channels['items'][0]['name'], '@spam')
        self.assertEqual(channels, await self.out(self.daemon.jsonrpc_channel_list(account1_id)))

        channels = await self.out(self.daemon.jsonrpc_channel_list(account2_id))
        self.assertItemCount(channels, 1)
        self.assertEqual(channels['items'][0]['name'], '@baz')

        # Unscoped channel_list merges both accounts (newest first).
        channels = await self.out(self.daemon.jsonrpc_channel_list())
        self.assertItemCount(channels, 2)
        self.assertEqual(channels['items'][0]['name'], '@baz')
        self.assertEqual(channels['items'][1]['name'], '@spam')

        # defaults to using all accounts to lookup channel
        await self.stream_create('hovercraft1', '0.1', channel_id=baz_id)
        self.assertEqual((await self.claim_search(name='hovercraft1'))[0]['signing_channel']['name'], '@baz')
        # lookup by channel_name in all accounts
        await self.stream_create('hovercraft2', '0.1', channel_name='@baz')
        self.assertEqual((await self.claim_search(name='hovercraft2'))[0]['signing_channel']['name'], '@baz')
        # uses only the specific accounts which contains the channel
        await self.stream_create('hovercraft3', '0.1', channel_id=baz_id, channel_account_id=[account2_id])
        self.assertEqual((await self.claim_search(name='hovercraft3'))[0]['signing_channel']['name'], '@baz')
        # lookup by channel_name in specific account
        await self.stream_create('hovercraft4', '0.1', channel_name='@baz', channel_account_id=[account2_id])
        self.assertEqual((await self.claim_search(name='hovercraft4'))[0]['signing_channel']['name'], '@baz')
        # fails when specifying account which does not contain channel
        with self.assertRaisesRegex(ValueError, "Couldn't find channel with channel_id"):
            await self.stream_create(
                'hovercraft5', '0.1', channel_id=baz_id, channel_account_id=[account1_id]
            )
        # fail with channel_name
        with self.assertRaisesRegex(ValueError, "Couldn't find channel with channel_name '@baz'"):
            await self.stream_create(
                'hovercraft5', '0.1', channel_name='@baz', channel_account_id=[account1_id]
            )

        # signing with channel works even if channel and certificate are in different accounts
        await self.channel_update(
            baz_id, account_id=account2_id,
            claim_address=await self.daemon.jsonrpc_address_unused(account1_id)
        )
        await self.stream_create(
            'hovercraft5', '0.1', channel_id=baz_id
        )
|
|
|
|
|
2019-04-27 18:31:51 +02:00
|
|
|
async def test_preview_works_with_signed_streams(self):
|
2019-11-11 05:20:14 +01:00
|
|
|
await self.channel_create('@spam', '1.0')
|
|
|
|
signed = await self.stream_create('bar', '1.0', channel_name='@spam', preview=True, confirm=False)
|
2019-04-27 18:31:51 +02:00
|
|
|
self.assertTrue(signed['outputs'][0]['is_channel_signature_valid'])
|
|
|
|
|
2019-10-14 10:17:37 +02:00
|
|
|
    async def test_repost(self):
        """Repost lifecycle: counters, tag inheritance, resolution and repost updates."""
        tx = await self.channel_create('@goodies', '1.0')
        goodies_claim_id = self.get_claim_id(tx)
        tx = await self.channel_create('@spam', '1.0')
        spam_claim_id = self.get_claim_id(tx)

        tx = await self.stream_create('newstuff', '1.1', channel_name='@goodies', tags=['foo', 'gaming'])
        claim_id = self.get_claim_id(tx)

        # No reposts yet anywhere.
        self.assertEqual((await self.claim_search(name='newstuff'))[0]['meta']['reposted'], 0)
        self.assertItemCount(await self.daemon.jsonrpc_txo_list(reposted_claim_id=claim_id), 0)
        self.assertItemCount(await self.daemon.jsonrpc_txo_list(type='repost'), 0)

        tx = await self.stream_repost(claim_id, 'newstuff-again', '1.1', channel_name='@spam')
        repost_id = self.get_claim_id(tx)

        # test inflating reposted channels works
        repost_url = f'newstuff-again:{repost_id}'
        # Clear the tx cache to force the resolve to fully re-fetch.
        self.ledger._tx_cache.clear()
        self.assertEqual(
            goodies_claim_id,
            (await self.out(self.daemon.jsonrpc_resolve(repost_url))
             )[repost_url]['reposted_claim']['signing_channel']['claim_id']
        )

        self.assertItemCount(await self.daemon.jsonrpc_claim_list(claim_type='repost'), 1)
        self.assertEqual((await self.claim_search(name='newstuff'))[0]['meta']['reposted'], 1)
        self.assertEqual((await self.claim_search(reposted_claim_id=claim_id))[0]['claim_id'], repost_id)
        self.assertEqual((await self.txo_list(reposted_claim_id=claim_id))[0]['claim_id'], repost_id)
        self.assertEqual((await self.txo_list(type='repost'))[0]['claim_id'], repost_id)

        # tags are inherited (non-common / indexed tags)
        self.assertItemCount(await self.daemon.jsonrpc_claim_search(any_tags=['foo'], claim_type=['stream', 'repost']), 2)
        self.assertItemCount(await self.daemon.jsonrpc_claim_search(all_tags=['foo'], claim_type=['stream', 'repost']), 2)
        self.assertItemCount(await self.daemon.jsonrpc_claim_search(not_tags=['foo'], claim_type=['stream', 'repost']), 0)
        # "common" / indexed tags work too
        self.assertItemCount(await self.daemon.jsonrpc_claim_search(any_tags=['gaming'], claim_type=['stream', 'repost']), 2)
        self.assertItemCount(await self.daemon.jsonrpc_claim_search(all_tags=['gaming'], claim_type=['stream', 'repost']), 2)
        self.assertItemCount(await self.daemon.jsonrpc_claim_search(not_tags=['gaming'], claim_type=['stream', 'repost']), 0)

        # A second repost bumps the reposted counter to 2.
        await self.channel_create('@reposting-goodies', '1.0')
        await self.stream_repost(claim_id, 'repost-on-channel', '1.1', channel_name='@reposting-goodies')
        self.assertItemCount(await self.daemon.jsonrpc_claim_list(claim_type='repost'), 2)
        self.assertItemCount(await self.daemon.jsonrpc_claim_search(reposted_claim_id=claim_id), 2)
        self.assertEqual((await self.claim_search(name='newstuff'))[0]['meta']['reposted'], 2)

        # Range queries over the repost counter.
        search_results = await self.claim_search(reposted='>=2')
        self.assertEqual(len(search_results), 1)
        self.assertEqual(search_results[0]['name'], 'newstuff')

        # A repost result embeds the fully-inflated reposted claim and both channels.
        search_results = await self.claim_search(name='repost-on-channel')
        self.assertEqual(len(search_results), 1)
        search = search_results[0]
        self.assertEqual(search['name'], 'repost-on-channel')
        self.assertEqual(search['signing_channel']['name'], '@reposting-goodies')
        self.assertEqual(search['reposted_claim']['name'], 'newstuff')
        self.assertEqual(search['reposted_claim']['meta']['reposted'], 2)
        self.assertEqual(search['reposted_claim']['signing_channel']['name'], '@goodies')

        # resolve must return the same inflated structure as claim_search.
        resolved = await self.out(
            self.daemon.jsonrpc_resolve(['@reposting-goodies/repost-on-channel', 'newstuff-again'])
        )
        self.assertEqual(resolved['@reposting-goodies/repost-on-channel'], search)
        self.assertEqual(resolved['newstuff-again']['reposted_claim']['name'], 'newstuff')

        # A repost can be updated like any stream and keeps its signing channel.
        await self.stream_update(repost_id, bid='0.42')
        searched_repost = (await self.claim_search(claim_id=repost_id))[0]
        self.assertEqual(searched_repost['amount'], '0.42')
        self.assertEqual(searched_repost['signing_channel']['claim_id'], spam_claim_id)
|
2019-11-05 08:02:05 +01:00
|
|
|
async def test_filtering_channels_for_removing_content(self):
    """
    Exercise the filtering/blocking channel machinery:

    - a repost from a *filtering* channel hides the target from claim_search
      (but it still resolves);
    - a repost from a *blocking* channel censors resolve as well;
    - reposting a whole channel filters/blocks everything signed by it.

    Fix: removed a leftover debug ``print(await self.resolve(...))`` that
    performed a redundant resolve call and polluted test output.
    """
    await self.channel_create('@some_channel', '0.1')
    await self.stream_create('good_content', '0.1', channel_name='@some_channel', tags=['good'])
    bad_content_id = self.get_claim_id(
        await self.stream_create('bad_content', '0.1', channel_name='@some_channel', tags=['bad'])
    )
    filtering_channel_id = self.get_claim_id(
        await self.channel_create('@filtering', '0.1')
    )
    # register @filtering as a filtering channel on both the hub server and the ES writer
    self.conductor.spv_node.server.db.filtering_channel_hashes.add(bytes.fromhex(filtering_channel_id))
    self.conductor.spv_node.es_writer.db.filtering_channel_hashes.add(bytes.fromhex(filtering_channel_id))

    # a repost from the filtering channel marks the reposted stream as filtered
    self.assertEqual(0, len(self.conductor.spv_node.es_writer.db.filtered_streams))
    await self.stream_repost(bad_content_id, 'filter1', '0.1', channel_name='@filtering')
    self.assertEqual(1, len(self.conductor.spv_node.es_writer.db.filtered_streams))

    # search for filtered content directly
    result = await self.out(self.daemon.jsonrpc_claim_search(name='bad_content'))
    blocked = result['blocked']
    self.assertEqual([], result['items'])
    self.assertEqual(1, blocked['total'])
    self.assertEqual(1, len(blocked['channels']))
    self.assertEqual(1, blocked['channels'][0]['blocked'])
    self.assertTrue(blocked['channels'][0]['channel']['short_url'].startswith('lbry://@filtering#'))

    # same search, but details omitted by 'no_totals'
    last_result = result
    result = await self.out(self.daemon.jsonrpc_claim_search(name='bad_content', no_totals=True))
    self.assertEqual(result['items'], last_result['items'])

    # search inside channel containing filtered content
    result = await self.out(self.daemon.jsonrpc_claim_search(channel='@some_channel'))
    filtered = result['blocked']
    self.assertEqual(1, len(result['items']))
    self.assertEqual(1, filtered['total'])
    self.assertEqual(1, len(filtered['channels']))
    self.assertEqual(1, filtered['channels'][0]['blocked'])
    self.assertTrue(filtered['channels'][0]['channel']['short_url'].startswith('lbry://@filtering#'))

    # same search, but details omitted by 'no_totals'
    last_result = result
    result = await self.out(self.daemon.jsonrpc_claim_search(channel='@some_channel', no_totals=True))
    self.assertEqual(result['items'], last_result['items'])

    # content was filtered by not_tag before censoring
    result = await self.out(self.daemon.jsonrpc_claim_search(channel='@some_channel', not_tags=["good", "bad"]))
    self.assertEqual(0, len(result['items']))
    self.assertEqual({"channels": [], "total": 0}, result['blocked'])

    # filtered content can still be resolved
    result = await self.resolve('lbry://@some_channel/bad_content')
    self.assertEqual(bad_content_id, result['claim_id'])

    blocking_channel_id = self.get_claim_id(
        await self.channel_create('@blocking', '0.1')
    )
    # test setting from env vars and starting from scratch
    await self.conductor.spv_node.stop(False)
    await self.conductor.spv_node.start(self.conductor.lbcwallet_node,
                                        extraconf={'blocking_channel_ids': [blocking_channel_id],
                                                   'filtering_channel_ids': [filtering_channel_id]})
    await self.daemon.wallet_manager.reset()

    self.assertEqual(0, len(self.conductor.spv_node.es_writer.db.blocked_streams))
    await self.stream_repost(bad_content_id, 'block1', '0.1', channel_name='@blocking')
    self.assertEqual(1, len(self.conductor.spv_node.es_writer.db.blocked_streams))

    # blocked content is not resolveable
    error = (await self.resolve('lbry://@some_channel/bad_content'))['error']
    self.assertEqual(error['name'], 'BLOCKED')
    self.assertTrue(error['text'].startswith("Resolve of 'lbry://@some_channel/bad_content' was censored"))
    self.assertTrue(error['censor']['short_url'].startswith('lbry://@blocking#'))

    # a filtered/blocked channel impacts all content inside it
    bad_channel_id = self.get_claim_id(
        await self.channel_create('@bad_channel', '0.1', tags=['bad-stuff'])
    )
    worse_content_id = self.get_claim_id(
        await self.stream_create('worse_content', '0.1', channel_name='@bad_channel', tags=['bad-stuff'])
    )

    # check search before filtering channel
    result = await self.out(self.daemon.jsonrpc_claim_search(any_tags=['bad-stuff'], order_by=['height']))
    self.assertEqual(2, result['total_items'])
    self.assertEqual('worse_content', result['items'][0]['name'])
    self.assertEqual('@bad_channel', result['items'][1]['name'])

    # filter channel out
    self.assertEqual(0, len(self.conductor.spv_node.server.db.filtered_channels))
    await self.stream_repost(bad_channel_id, 'filter2', '0.1', channel_name='@filtering')
    self.assertEqual(1, len(self.conductor.spv_node.server.db.filtered_channels))

    # same claim search as previous now returns 0 results
    result = await self.out(self.daemon.jsonrpc_claim_search(any_tags=['bad-stuff'], order_by=['height']))
    filtered = result['blocked']
    self.assertEqual(0, len(result['items']))
    self.assertEqual(3, filtered['total'])
    self.assertEqual(1, len(filtered['channels']))
    self.assertEqual(3, filtered['channels'][0]['blocked'])
    self.assertTrue(filtered['channels'][0]['channel']['short_url'].startswith('lbry://@filtering#'))

    # same search, but details omitted by 'no_totals'
    last_result = result
    result = await self.out(
        self.daemon.jsonrpc_claim_search(any_tags=['bad-stuff'], order_by=['height'], no_totals=True)
    )
    self.assertEqual(result['items'], last_result['items'])

    # filtered channel should still resolve
    result = await self.resolve('lbry://@bad_channel')
    self.assertEqual(bad_channel_id, result['claim_id'])
    result = await self.resolve('lbry://@bad_channel/worse_content')
    self.assertEqual(worse_content_id, result['claim_id'])

    # block channel
    self.assertEqual(0, len(self.conductor.spv_node.server.db.blocked_channels))
    await self.stream_repost(bad_channel_id, 'block2', '0.1', channel_name='@blocking')
    self.assertEqual(1, len(self.conductor.spv_node.server.db.blocked_channels))

    # channel, claim in channel or claim individually no longer resolve
    self.assertEqual((await self.resolve('lbry://@bad_channel'))['error']['name'], 'BLOCKED')
    self.assertEqual((await self.resolve('lbry://worse_content'))['error']['name'], 'BLOCKED')
    self.assertEqual((await self.resolve('lbry://@bad_channel/worse_content'))['error']['name'], 'BLOCKED')
|
2019-04-27 16:29:53 +02:00
|
|
|
async def test_publish_updates_file_list(self):
    """Stream create/update should keep exactly one file_list entry in sync with the claim."""

    async def assert_single_file(expected_tx, expected_metadata):
        # the daemon's file list must contain exactly one entry, tied to the
        # given transaction and carrying the given claim metadata
        entries = await self.file_list()
        self.assertEqual(1, len(entries))
        self.assertEqual(expected_tx['txid'], entries[0]['txid'])
        self.assertEqual(expected_metadata, entries[0]['metadata'])

    create_tx = await self.stream_create(title='created')
    first_output = create_tx['outputs'][0]
    claim_id = first_output['claim_id']
    metadata = first_output['value']
    await assert_single_file(create_tx, metadata)

    # update with metadata-only changes
    update_tx = await self.stream_update(claim_id, title='update 1')
    metadata['title'] = 'update 1'
    await assert_single_file(update_tx, metadata)

    # update with new data
    update_tx = await self.stream_update(claim_id, title='update 2', data=b'updated data')
    await assert_single_file(update_tx, update_tx['outputs'][0]['value'])
|
2019-03-30 01:26:10 +01:00
|
|
|
async def test_setting_stream_fields(self):
    """
    End-to-end check of every stream metadata field through create/update.

    ``values`` holds the raw keyword arguments as passed on the command line;
    ``fixed_values`` is mutated step by step into the normalized claim value
    the daemon is expected to return (locations parsed, fee grouped, etc.).
    The in-place mutations of ``fixed_values`` are order-sensitive: each
    assertEqual compares against its state at that exact point.
    """
    values = {
        'title': "Cool Content",
        'description': "Best content on LBRY.",
        'thumbnail_url': "https://co.ol/thumbnail.png",
        'tags': ["cool", "awesome"],
        'languages': ["en"],
        'locations': ['US:NH:Manchester:03101:42.990605:-71.460989'],

        'author': "Jules Verne",
        'license': 'Public Domain',
        'license_url': "https://co.ol/license",
        'release_time': 123456,

        'fee_currency': 'usd',
        'fee_amount': '2.99',
        'fee_address': 'mmCsWAiXMUVecFQ3fVzUwvpT9XFMXno2Ca',
    }
    # normalize the raw inputs into the shape the daemon returns:
    # colon-delimited location string becomes a structured dict
    fixed_values = values.copy()
    fixed_values['locations'] = [{
        'country': 'US',
        'state': 'NH',
        'city': 'Manchester',
        'code': '03101',
        'latitude': '42.990605',
        'longitude': '-71.460989'
    }]
    # thumbnail_url is wrapped, release_time is stringified
    fixed_values['thumbnail'] = {'url': fixed_values.pop('thumbnail_url')}
    fixed_values['release_time'] = str(values['release_time'])
    fixed_values['stream_type'] = 'binary'
    fixed_values['source'] = {
        'hash': '56bf5dbae43f77a63d075b0f2ae9c7c3e3098db93779c7f9840da0f4db9c2f8c8454f4edd1373e2b64ee2e68350d916e',
        'media_type': 'application/octet-stream',
        'size': '3'
    }
    # the three fee_* kwargs collapse into one 'fee' dict, currency upper-cased
    fixed_values['fee'] = {
        'address': fixed_values.pop('fee_address'),
        'amount': fixed_values.pop('fee_amount'),
        'currency': fixed_values.pop('fee_currency').upper()
    }

    # create new stream with all fields set
    tx = await self.out(self.stream_create('big', **values))
    stream = tx['outputs'][0]['value']
    # source name/sd_hash are generated per-publish, copy them before comparing
    fixed_values['source']['name'] = stream['source']['name']
    fixed_values['source']['sd_hash'] = stream['source']['sd_hash']
    self.assertEqual(stream, fixed_values)

    # create stream with nothing set
    tx = await self.out(self.stream_create('light'))
    stream = tx['outputs'][0]['value']
    self.assertEqual(
        stream, {
            'stream_type': 'binary',
            'source': {
                'size': '3',
                'media_type': 'application/octet-stream',
                'name': stream['source']['name'],
                'hash': '56bf5dbae43f77a63d075b0f2ae9c7c3e3098db93779c7f9840da0f4db9c2f8c8454f4edd1373e2b64ee2e68350d916e',
                'sd_hash': stream['source']['sd_hash']
            },
        }
    )

    # create stream with just some tags, langs and locations
    tx = await self.out(self.stream_create('updated', tags='blah', languages='uk', locations='UA::Kyiv'))
    txo = tx['outputs'][0]
    claim_id, stream = txo['claim_id'], txo['value']
    fixed_values['source']['name'] = stream['source']['name']
    fixed_values['source']['sd_hash'] = stream['source']['sd_hash']
    self.assertEqual(
        stream, {
            'stream_type': 'binary',
            'source': {
                'size': '3',
                'media_type': 'application/octet-stream',
                'name': fixed_values['source']['name'],
                'hash': '56bf5dbae43f77a63d075b0f2ae9c7c3e3098db93779c7f9840da0f4db9c2f8c8454f4edd1373e2b64ee2e68350d916e',
                'sd_hash': fixed_values['source']['sd_hash'],
            },
            'tags': ['blah'],
            'languages': ['uk'],
            'locations': [{'country': 'UA', 'city': 'Kyiv'}]
        }
    )

    # update stream setting all fields, 'source' doesn't change;
    # list fields are appended to the existing values, not replaced
    tx = await self.out(self.stream_update(claim_id, **values))
    stream = tx['outputs'][0]['value']
    fixed_values['tags'].insert(0, 'blah')  # existing tag
    fixed_values['languages'].insert(0, 'uk')  # existing language
    fixed_values['locations'].insert(0, {'country': 'UA', 'city': 'Kyiv'})  # existing location
    self.assertEqual(stream, fixed_values)

    # clearing and settings tags, languages and locations
    tx = await self.out(self.stream_update(
        claim_id, tags='single', clear_tags=True,
        languages='pt', clear_languages=True,
        locations='BR', clear_locations=True,
    ))
    txo = tx['outputs'][0]
    fixed_values['tags'] = ['single']
    fixed_values['languages'] = ['pt']
    fixed_values['locations'] = [{'country': 'BR'}]
    self.assertEqual(txo['value'], fixed_values)

    # modifying hash/size/name
    fixed_values['source']['name'] = 'changed_name'
    fixed_values['source']['hash'] = 'cafebeef'
    fixed_values['source']['size'] = '42'
    tx = await self.out(self.stream_update(
        claim_id, file_name='changed_name', file_hash='cafebeef', file_size=42
    ))
    self.assertEqual(tx['outputs'][0]['value'], fixed_values)

    # stream_update re-signs with the same channel
    channel_id = self.get_claim_id(await self.channel_create('@chan'))
    tx = await self.stream_update(claim_id, channel_id=channel_id)
    self.assertEqual(tx['outputs'][0]['signing_channel']['name'], '@chan')
    tx = await self.stream_update(claim_id, title='channel re-signs')
    self.assertEqual(tx['outputs'][0]['value']['title'], 'channel re-signs')
    self.assertEqual(tx['outputs'][0]['signing_channel']['name'], '@chan')

    # send claim to someone else
    new_account = await self.out(self.daemon.jsonrpc_account_create('second account'))
    account2_id, account2 = new_account['id'], self.wallet.get_account_or_error(new_account['id'])

    # before sending
    self.assertItemCount(await self.daemon.jsonrpc_claim_list(), 4)
    self.assertItemCount(await self.daemon.jsonrpc_claim_list(account_id=self.account.id), 4)
    self.assertItemCount(await self.daemon.jsonrpc_claim_list(account_id=account2_id), 0)

    other_address = await account2.receiving.get_or_create_usable_address()
    tx = await self.out(self.stream_update(claim_id, claim_address=other_address))

    # after sending
    self.assertItemCount(await self.daemon.jsonrpc_claim_list(), 4)
    self.assertItemCount(await self.daemon.jsonrpc_claim_list(account_id=self.account.id), 3)
    self.assertItemCount(await self.daemon.jsonrpc_claim_list(account_id=account2_id), 1)

    # release_time filtering: only 'updated' carries the explicit 123456
    # release_time; counts below reflect the 4 claims created above
    self.assertEqual(3, len(await self.claim_search(release_time='>0', order_by=['release_time'])))
    self.assertEqual(3, len(await self.claim_search(release_time='>=0', order_by=['release_time'])))
    self.assertEqual(4, len(await self.claim_search(order_by=['release_time'])))
    self.assertEqual(3, len(await self.claim_search(claim_type='stream', order_by=['release_time'])))
    self.assertEqual(1, len(await self.claim_search(claim_type='channel', order_by=['release_time'])))
    self.assertEqual(1, len(await self.claim_search(release_time='>=123456', order_by=['release_time'])))
    self.assertEqual(1, len(await self.claim_search(release_time='>123456', order_by=['release_time'])))
    self.assertEqual(2, len(await self.claim_search(release_time='<123457', order_by=['release_time'])))

    # release_time also accepts a list of constraints (ANDed together),
    # in either order
    self.assertEqual(2, len(await self.claim_search(release_time=['<123457'], order_by=['release_time'])))
    self.assertEqual(2, len(await self.claim_search(release_time=['>0', '<123457'], order_by=['release_time'])))
    self.assertEqual(
        2, len(await self.claim_search(release_time=['>=123097', '<123457'], order_by=['release_time']))
    )
    self.assertEqual(
        2, len(await self.claim_search(release_time=['<123457', '>0'], order_by=['release_time']))
    )
2019-06-19 11:33:47 +02:00
|
|
|
async def test_setting_fee_fields(self):
    """
    Fee fields (amount/currency/address) on stream_update, with and without
    --replace. Validation errors, partial updates that merge with the
    previous fee, and clear_fee are all covered. The fee state carried
    between steps is order-sensitive.
    """
    tx = await self.out(self.stream_create('paid-stream'))
    txo = tx['outputs'][0]
    claim_id, stream = txo['claim_id'], txo['value']
    fee_address = 'mmCsWAiXMUVecFQ3fVzUwvpT9XFMXno2Ca'

    # a fresh stream has no fee section
    self.assertNotIn('fee', stream)

    # --replace=false
    # validation: amount and currency must be supplied together
    with self.assertRaisesRegex(Exception, 'please specify a fee currency'):
        await self.stream_update(claim_id, fee_amount='0.1')
    with self.assertRaisesRegex(Exception, 'unknown currency provided: foo'):
        await self.stream_update(claim_id, fee_amount='0.1', fee_currency="foo")
    with self.assertRaisesRegex(Exception, 'please specify a fee amount'):
        await self.stream_update(claim_id, fee_currency='usd')
    with self.assertRaisesRegex(Exception, 'please specify a fee amount'):
        await self.stream_update(claim_id, fee_address=fee_address)
    # set just amount and currency with default address
    # (defaults to the claim's own address)
    tx = await self.stream_update(
        claim_id, fee_amount='0.99', fee_currency='lbc'
    )
    self.assertEqual(
        tx['outputs'][0]['value']['fee'],
        {'amount': '0.99', 'currency': 'LBC', 'address': txo['address']}
    )
    # set all fee fields
    tx = await self.stream_update(
        claim_id, fee_amount='0.1', fee_currency='usd', fee_address=fee_address
    )
    self.assertEqual(
        tx['outputs'][0]['value']['fee'],
        {'amount': '0.1', 'currency': 'USD', 'address': fee_address}
    )
    # change just address
    tx = await self.stream_update(claim_id, fee_address=txo['address'])
    self.assertEqual(
        tx['outputs'][0]['value']['fee'],
        {'amount': '0.1', 'currency': 'USD', 'address': txo['address']}
    )
    # change just amount (does not reset fee_address)
    tx = await self.stream_update(claim_id, fee_amount='0.2')
    self.assertEqual(
        tx['outputs'][0]['value']['fee'],
        {'amount': '0.2', 'currency': 'USD', 'address': txo['address']}
    )
    # changing currency without an amount is never allowed, even if previous amount exists
    with self.assertRaises(Exception, msg='In order to set a fee currency, please specify a fee amount'):
        await self.stream_update(claim_id, fee_currency='usd')
    # clearing fee
    tx = await self.out(self.stream_update(claim_id, clear_fee=True))
    self.assertNotIn('fee', tx['outputs'][0]['value'])

    # --replace=true: same behavior is expected when replacing the whole value
    # set just amount and currency with default address
    tx = await self.stream_update(
        claim_id, fee_amount='0.99', fee_currency='lbc', replace=True
    )
    self.assertEqual(
        tx['outputs'][0]['value']['fee'],
        {'amount': '0.99', 'currency': 'LBC', 'address': txo['address']}
    )
    # set all fee fields
    tx = await self.stream_update(
        claim_id, fee_amount='0.1', fee_currency='usd', fee_address=fee_address, replace=True
    )
    self.assertEqual(
        tx['outputs'][0]['value']['fee'],
        {'amount': '0.1', 'currency': 'USD', 'address': fee_address}
    )
    # validation
    with self.assertRaisesRegex(Exception, 'please specify a fee currency'):
        await self.stream_update(claim_id, fee_amount='0.1', replace=True)
    with self.assertRaisesRegex(Exception, 'unknown currency provided: foo'):
        await self.stream_update(claim_id, fee_amount='0.1', fee_currency="foo", replace=True)
    with self.assertRaisesRegex(Exception, 'please specify a fee amount'):
        await self.stream_update(claim_id, fee_currency='usd', replace=True)
    with self.assertRaisesRegex(Exception, 'please specify a fee amount'):
        await self.stream_update(claim_id, fee_address=fee_address, replace=True)
|
2019-04-22 04:12:02 +02:00
|
|
|
async def test_automatic_type_and_metadata_detection_for_image(self):
    """Publishing a .png should yield stream_type 'image' with detected dimensions."""
    tx = await self.stream_create('blank-image', data=self.image_data, suffix='.png')
    output = tx['outputs'][0]
    source = output['value']['source']
    expected_value = {
        'source': {
            'size': '99',
            # name and sd_hash are generated per publish, so copy them over
            'name': source['name'],
            'media_type': 'image/png',
            'hash': '6c7df435d412c603390f593ef658c199817c7830ba3f16b7eadd8f99fa50e85dbd0d2b3dc61eadc33fe096e3872d1545',
            'sd_hash': source['sd_hash'],
        },
        'stream_type': 'image',
        # dimensions detected from the image data itself
        'image': {'width': 5, 'height': 7},
    }
    self.assertEqual(output['value'], expected_value)
2019-04-22 04:12:02 +02:00
|
|
|
|
|
|
|
async def test_automatic_type_and_metadata_detection_for_video(self):
    """Publishing an mp4 should yield stream_type 'video' with detected dimensions/duration."""
    tx = await self.stream_create('chrome', file_path=self.video_file_name)
    output = tx['outputs'][0]
    expected_value = {
        'source': {
            'size': '2299653',
            'name': 'ForBiggerEscapes.mp4',
            'media_type': 'video/mp4',
            'hash': '5f6811c83c1616df06f10bf5309ca61edb5ff949a9c1212ce784602d837bfdfc1c3db1e0580ef7bd1dadde41d8acf315',
            # sd_hash is generated per publish, so copy it over
            'sd_hash': output['value']['source']['sd_hash'],
        },
        'stream_type': 'video',
        # metadata detected from the video file itself
        'video': {'width': 1280, 'height': 720, 'duration': 15},
    }
    self.assertEqual(output['value'], expected_value)
|
|
async def test_overriding_automatic_metadata_detection(self):
    """Explicit width/height/duration arguments take precedence over detection."""
    tx = await self.out(
        self.daemon.jsonrpc_stream_create(
            'chrome', '1.0', file_path=self.video_file_name, width=99, height=88, duration=9
        )
    )
    output = tx['outputs'][0]
    expected_value = {
        'source': {
            'size': '2299653',
            'name': 'ForBiggerEscapes.mp4',
            'media_type': 'video/mp4',
            'hash': '5f6811c83c1616df06f10bf5309ca61edb5ff949a9c1212ce784602d837bfdfc1c3db1e0580ef7bd1dadde41d8acf315',
            # sd_hash is generated per publish, so copy it over
            'sd_hash': output['value']['source']['sd_hash'],
        },
        'stream_type': 'video',
        # the caller-supplied values, not the file's actual metadata
        'video': {'width': 99, 'height': 88, 'duration': 9},
    }
    self.assertEqual(output['value'], expected_value)
|
2019-10-04 11:29:35 +02:00
|
|
|
async def test_update_file_type(self):
    """Replacing a stream's data should re-detect its media type and swap the type-specific metadata section."""
    create_tx = await self.stream_create('chrome', file_path=self.video_file_name)
    video_value = create_tx['outputs'][0]['value']
    self.assertSetEqual(set(video_value), {'source', 'stream_type', 'video'})
    self.assertEqual('video', video_value['stream_type'])
    self.assertEqual('video/mp4', video_value['source']['media_type'])
    self.assertEqual(
        {'duration': 15, 'height': 720, 'width': 1280},
        video_value['video']
    )
    claim_id = create_tx['outputs'][0]['claim_id']

    # raw bytes: detected as generic binary; no type-specific section remains
    binary_value = (await self.stream_update(claim_id, data=b'hi!'))['outputs'][0]['value']
    self.assertEqual('binary', binary_value['stream_type'])
    self.assertEqual('application/octet-stream', binary_value['source']['media_type'])
    self.assertSetEqual(set(binary_value), {'source', 'stream_type'})

    # png bytes: detected as image with parsed dimensions
    image_value = (await self.stream_update(claim_id, data=self.image_data, suffix='.png'))['outputs'][0]['value']
    self.assertSetEqual(set(image_value), {'source', 'stream_type', 'image'})
    self.assertEqual('image', image_value['stream_type'])
    self.assertEqual('image/png', image_value['source']['media_type'])
    self.assertEqual({'height': 7, 'width': 5}, image_value['image'])
2019-04-27 02:44:38 +02:00
|
|
|
async def test_replace_mode_preserves_source_and_type(self):
    """
    stream_update with replace=True must wipe optional metadata (tags,
    languages, locations, signing channel) but keep 'source', 'stream_type'
    and the detected 'video' section intact.

    ``expected`` is mutated in place between the two assertEquals to track
    exactly what should survive the replace.
    """
    expected = {
        'tags': ['blah'],
        'languages': ['uk'],
        'locations': [{'country': 'UA', 'city': 'Kyiv'}],
        'source': {
            'size': '2299653',
            'name': 'ForBiggerEscapes.mp4',
            'media_type': 'video/mp4',
            'hash': '5f6811c83c1616df06f10bf5309ca61edb5ff949a9c1212ce784602d837bfdfc1c3db1e0580ef7bd1dadde41d8acf315',
        },
        'stream_type': 'video',
        'video': {
            'width': 1280,
            'height': 720,
            'duration': 15
        }
    }
    channel = await self.channel_create('@chan')
    tx = await self.out(self.daemon.jsonrpc_stream_create(
        'chrome', '1.0', file_path=self.video_file_name,
        tags='blah', languages='uk', locations='UA::Kyiv',
        channel_id=self.get_claim_id(channel)
    ))
    await self.on_transaction_dict(tx)
    txo = tx['outputs'][0]
    # sd_hash is generated per publish, copy it into the expectation
    expected['source']['sd_hash'] = txo['value']['source']['sd_hash']
    self.assertEqual(txo['value'], expected)
    self.assertEqual(txo['signing_channel']['name'], '@chan')
    tx = await self.out(self.daemon.jsonrpc_stream_update(
        txo['claim_id'], title='new title', replace=True
    ))
    txo = tx['outputs'][0]
    # replace=True: only the newly supplied title plus source/type survive;
    # tags, languages, locations and the channel signature are dropped
    expected['title'] = 'new title'
    del expected['tags']
    del expected['languages']
    del expected['locations']
    self.assertEqual(txo['value'], expected)
    self.assertNotIn('signing_channel', txo)
2019-04-21 05:54:34 +02:00
|
|
|
async def test_create_update_and_abandon_stream(self):
    """
    Full claim lifecycle (create -> update -> abandon) as seen through
    transaction_list, is_spent flags and account balance.

    transaction_list is newest-first, so txs[0] is always the most recent
    transaction. The exact fee strings below are fixed by the deterministic
    transaction sizes in this test setup.
    """
    await self.assertBalance(self.account, '10.0')

    tx = await self.stream_create(bid='2.5')  # creates new claim
    claim_id = self.get_claim_id(tx)
    txs = await self.transaction_list()
    # newest tx carries the claim creation: 2.5 locked into the claim
    self.assertEqual(len(txs[0]['claim_info']), 1)
    self.assertEqual(txs[0]['confirmations'], 1)
    self.assertEqual(txs[0]['claim_info'][0]['balance_delta'], '-2.5')
    self.assertEqual(txs[0]['claim_info'][0]['claim_id'], claim_id)
    self.assertFalse(txs[0]['claim_info'][0]['is_spent'])
    self.assertEqual(txs[0]['value'], '0.0')
    self.assertEqual(txs[0]['fee'], '-0.020107')
    await self.assertBalance(self.account, '7.479893')
    self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)

    # deleting the local file does not touch the claim
    await self.daemon.jsonrpc_file_delete(delete_all=True)
    self.assertItemCount(await self.daemon.jsonrpc_file_list(), 0)

    await self.stream_update(claim_id, bid='1.0')  # updates previous claim
    txs = await self.transaction_list()
    # bid lowered 2.5 -> 1.0, so 1.5 flows back; the old claim txo is now spent
    self.assertEqual(len(txs[0]['update_info']), 1)
    self.assertEqual(txs[0]['update_info'][0]['balance_delta'], '1.5')
    self.assertEqual(txs[0]['update_info'][0]['claim_id'], claim_id)
    self.assertFalse(txs[0]['update_info'][0]['is_spent'])
    self.assertTrue(txs[1]['claim_info'][0]['is_spent'])
    self.assertEqual(txs[0]['value'], '0.0')
    self.assertEqual(txs[0]['fee'], '-0.0002165')
    await self.assertBalance(self.account, '8.9796765')

    await self.stream_abandon(claim_id)
    txs = await self.transaction_list()
    # abandoning releases the remaining 1.0 bid; both earlier claim txos are spent
    self.assertEqual(len(txs[0]['abandon_info']), 1)
    self.assertEqual(txs[0]['abandon_info'][0]['balance_delta'], '1.0')
    self.assertEqual(txs[0]['abandon_info'][0]['claim_id'], claim_id)
    self.assertTrue(txs[1]['update_info'][0]['is_spent'])
    self.assertTrue(txs[2]['claim_info'][0]['is_spent'])
    self.assertEqual(txs[0]['value'], '0.0')
    self.assertEqual(txs[0]['fee'], '-0.000107')
    await self.assertBalance(self.account, '9.9795695')
2019-04-21 05:54:34 +02:00
|
|
|
    async def test_abandoning_stream_at_loss(self):
        """Abandoning a stream whose bid is smaller than the fees paid still
        succeeds: the final balance ends up below the pre-publish balance."""
        await self.assertBalance(self.account, '10.0')
        # bid is tiny (0.0001) so the tx fees dominate the cost of publishing
        tx = await self.stream_create(bid='0.0001')
        await self.assertBalance(self.account, '9.979793')
        await self.stream_abandon(self.get_claim_id(tx))
        # abandon returns the bid but not the fees, so we end below 10.0
        await self.assertBalance(self.account, '9.97968399')
|
|
|
|
|
2019-03-27 21:02:17 +01:00
|
|
|
    async def test_publish(self):
        """Exercise the `publish` command end to end: create vs. update
        resolution by name, duplicate-name conflicts, publishing into a
        channel, clearing the channel on re-publish, and sourceless
        (pre-release) streams later updated to carry a source."""

        # errors on missing arguments to create a stream
        with self.assertRaisesRegex(Exception, "'bid' is a required argument for new publishes."):
            await self.daemon.jsonrpc_publish('foo')

        # successfully create stream
        with tempfile.NamedTemporaryFile() as file:
            file.write(b'hi')
            file.flush()
            tx1 = await self.publish('foo', bid='1.0', file_path=file.name)

        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)

        # doesn't error on missing arguments when doing an update stream
        tx2 = await self.publish('foo', tags='updated')

        # still one file, and the claim id is unchanged: publish updated
        # the existing claim instead of creating a new one
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 1)
        self.assertEqual(self.get_claim_id(tx1), self.get_claim_id(tx2))

        # update conflict with two claims of the same name
        tx3 = await self.stream_create('foo', allow_duplicate_name=True)
        with self.assertRaisesRegex(Exception, "There are 2 claims for 'foo'"):
            await self.daemon.jsonrpc_publish('foo')

        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 2)
        # abandon duplicate stream
        await self.stream_abandon(self.get_claim_id(tx3))

        # publish to a channel
        await self.channel_create('@abc')
        tx3 = await self.publish('foo', channel_name='@abc')
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 2)
        r = await self.resolve('lbry://@abc/foo')
        self.assertEqual(
            r['claim_id'],
            self.get_claim_id(tx3)
        )

        # publishing again clears channel
        tx4 = await self.publish('foo', languages='uk-UA', tags=['Anime', 'anime '])
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 2)
        claim = await self.resolve('lbry://foo')
        self.assertEqual(claim['txid'], tx4['outputs'][0]['txid'])
        self.assertNotIn('signing_channel', claim)
        self.assertEqual(claim['value']['languages'], ['uk-UA'])
        # tags are normalized: lowercased, stripped and de-duplicated
        self.assertEqual(claim['value']['tags'], ['anime'])

        # publish a stream with no source
        tx5 = await self.publish(
            'future-release', bid='0.1', languages='uk-UA', tags=['Anime', 'anime ']
        )
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 2)
        claim = await self.resolve('lbry://future-release')
        self.assertEqual(claim['txid'], tx5['outputs'][0]['txid'])
        self.assertNotIn('signing_channel', claim)
        self.assertEqual(claim['value']['languages'], ['uk-UA'])
        self.assertEqual(claim['value']['tags'], ['anime'])
        self.assertNotIn('source', claim['value'])

        # change metadata before the release
        await self.publish(
            'future-release', bid='0.1', tags=['Anime', 'anime ', 'psy-trance'], title='Psy will be over 9000!!!'
        )
        self.assertItemCount(await self.daemon.jsonrpc_file_list(), 2)
        claim = await self.resolve('lbry://future-release')
        self.assertEqual(claim['value']['tags'], ['anime', 'psy-trance'])
        self.assertEqual(claim['value']['title'], 'Psy will be over 9000!!!')
        self.assertNotIn('source', claim['value'])

        # update the stream to have a source
        tx6 = await self.publish(
            'future-release', sd_hash='beef', file_hash='beef',
            file_name='blah.mp3', tags=['something-else']
        )
        claim = await self.resolve('lbry://future-release')
        self.assertEqual(claim['txid'], tx6['outputs'][0]['txid'])
        self.assertEqual(claim['value']['tags'], ['something-else'])
        self.assertEqual(claim['value']['source']['sd_hash'], 'beef')
        self.assertEqual(claim['value']['source']['hash'], 'beef')
        self.assertEqual(claim['value']['source']['name'], 'blah.mp3')
        # media type is derived from the file name extension (.mp3)
        self.assertEqual(claim['value']['source']['media_type'], 'audio/mpeg')
|
2021-03-08 20:39:22 +01:00
|
|
|
|
2019-03-26 03:06:36 +01:00
|
|
|
|
|
|
|
class SupportCommands(CommandTestCase):
    """Tests for the `support_create` / `support_abandon` commands, covering
    both plain supports and tips (supports sent to another wallet)."""

    async def test_regular_supports_and_tip_supports(self):
        """Tip vs. support semantics: a tip transfers funds to the claim
        owner, a plain support only locks the supporter's own funds; both
        must be reported correctly in `transaction_list` for each wallet."""
        wallet2 = await self.daemon.jsonrpc_wallet_create('wallet2', create_account=True)
        account2 = wallet2.accounts[0]

        # send account2 5 LBC out of the 10 LBC in account1
        result = await self.out(self.daemon.jsonrpc_account_send(
            '5.0', await self.daemon.jsonrpc_address_unused(wallet_id='wallet2')
        ))
        await self.on_transaction_dict(result)

        # account1 and account2 balances:
        await self.assertBalance(self.account, '4.999876')
        await self.assertBalance(account2, '5.0')

        # create the claim we'll be tipping and supporting
        claim_id = self.get_claim_id(await self.stream_create())

        # account1 and account2 balances:
        await self.assertBalance(self.account, '3.979769')
        await self.assertBalance(account2, '5.0')

        # send a tip to the claim using account2
        tip = await self.out(
            self.daemon.jsonrpc_support_create(
                claim_id, '1.0', True, account_id=account2.id, wallet_id='wallet2',
                funding_account_ids=[account2.id], blocking=True)
        )
        await self.confirm_tx(tip['txid'])

        # tips don't affect balance so account1 balance is same but account2 balance went down
        await self.assertBalance(self.account, '3.979769')
        await self.assertBalance(account2, '3.9998585')

        # verify that the incoming tip is marked correctly as is_tip=True in account1
        txs = await self.transaction_list(account_id=self.account.id)
        self.assertEqual(len(txs[0]['support_info']), 1)
        self.assertEqual(txs[0]['support_info'][0]['balance_delta'], '1.0')
        self.assertEqual(txs[0]['support_info'][0]['claim_id'], claim_id)
        self.assertTrue(txs[0]['support_info'][0]['is_tip'])
        self.assertFalse(txs[0]['support_info'][0]['is_spent'])
        self.assertEqual(txs[0]['value'], '1.0')
        self.assertEqual(txs[0]['fee'], '0.0')

        # verify that the outgoing tip is marked correctly as is_tip=True in account2
        txs2 = await self.transaction_list(wallet_id='wallet2', account_id=account2.id)
        self.assertEqual(len(txs2[0]['support_info']), 1)
        self.assertEqual(txs2[0]['support_info'][0]['balance_delta'], '-1.0')
        self.assertEqual(txs2[0]['support_info'][0]['claim_id'], claim_id)
        self.assertTrue(txs2[0]['support_info'][0]['is_tip'])
        self.assertFalse(txs2[0]['support_info'][0]['is_spent'])
        self.assertEqual(txs2[0]['value'], '-1.0')
        self.assertEqual(txs2[0]['fee'], '-0.0001415')

        # send a support to the claim using account2
        support = await self.out(
            self.daemon.jsonrpc_support_create(
                claim_id, '2.0', False, account_id=account2.id, wallet_id='wallet2',
                funding_account_ids=[account2.id], blocking=True)
        )
        await self.confirm_tx(support['txid'])

        # account2 balance went down ~2
        await self.assertBalance(self.account, '3.979769')
        await self.assertBalance(account2, '1.999717')

        # verify that the outgoing support is marked correctly as is_tip=False in account2
        txs2 = await self.transaction_list(wallet_id='wallet2')
        self.assertEqual(len(txs2[0]['support_info']), 1)
        self.assertEqual(txs2[0]['support_info'][0]['balance_delta'], '-2.0')
        self.assertEqual(txs2[0]['support_info'][0]['claim_id'], claim_id)
        self.assertFalse(txs2[0]['support_info'][0]['is_tip'])
        self.assertFalse(txs2[0]['support_info'][0]['is_spent'])
        # a plain support keeps the funds in the same wallet, so net value is zero
        self.assertEqual(txs2[0]['value'], '0.0')
        self.assertEqual(txs2[0]['fee'], '-0.0001415')

        # abandoning the tip increases balance and shows tip as spent
        await self.support_abandon(claim_id)
        await self.assertBalance(self.account, '4.979662')
        txs = await self.transaction_list(account_id=self.account.id)
        self.assertEqual(len(txs[0]['abandon_info']), 1)
        self.assertEqual(len(txs[1]['support_info']), 1)
        self.assertTrue(txs[1]['support_info'][0]['is_tip'])
        self.assertTrue(txs[1]['support_info'][0]['is_spent'])

    async def test_signed_supports_with_no_change_txo_regression(self):
        """Regression test for single-output signed support transactions."""
        # reproduces a bug where transactions did not get properly signed
        # if there was no change and just a single output
        # lbrycrd returned 'the transaction was rejected by network rules.'
        channel_id = self.get_claim_id(await self.channel_create())
        stream_id = self.get_claim_id(await self.stream_create())
        # amount chosen to consume the remaining balance exactly, leaving no change txo
        tx = await self.support_create(stream_id, '7.967601', channel_id=channel_id)
        self.assertEqual(len(tx['outputs']), 1)  # must be one to reproduce bug
        self.assertTrue(tx['outputs'][0]['is_channel_signature_valid'])
|
|
|
|
|
2019-11-13 23:50:35 +01:00
|
|
|
|
|
|
|
class CollectionCommands(CommandTestCase):
    """Tests for the collection_* commands: create, list, update
    (clear_claims/replace modes), resolve and abandon."""

    async def test_collections(self):
        """Full lifecycle of a collection claim, including duplicate and
        dangling claim ids, duplicate-name handling, partial and full
        updates, and paginated resolution of the contained claims."""
        claim_ids = [
            self.get_claim_id(tx) for tx in [
                await self.stream_create('stream-one'),
                await self.stream_create('stream-two')
            ]
        ]
        # include a duplicate entry and a claim id that doesn't resolve
        claim_ids.append(claim_ids[0])
        claim_ids.append('beef')
        tx = await self.collection_create('radjingles', claims=claim_ids, title="boring title")
        claim_id = self.get_claim_id(tx)
        collections = await self.out(self.daemon.jsonrpc_collection_list())
        self.assertEqual(collections['items'][0]['value']['title'], 'boring title')
        self.assertEqual(collections['items'][0]['value']['claims'], claim_ids)
        self.assertEqual(collections['items'][0]['value_type'], 'collection')

        self.assertItemCount(collections, 1)
        await self.assertBalance(self.account, '6.939679')

        # creating a second collection with the same name is rejected by default
        with self.assertRaisesRegex(Exception, "You already have a collection under the name 'radjingles'."):
            await self.collection_create('radjingles', claims=claim_ids)

        self.assertItemCount(await self.daemon.jsonrpc_collection_list(), 1)
        await self.assertBalance(self.account, '6.939679')

        collections = await self.out(self.daemon.jsonrpc_collection_list())
        self.assertEqual(collections['items'][0]['value']['title'], 'boring title')
        await self.collection_update(claim_id, title='fancy title')
        collections = await self.out(self.daemon.jsonrpc_collection_list())
        # metadata-only update keeps the claim list intact
        self.assertEqual(collections['items'][0]['value']['title'], 'fancy title')
        self.assertEqual(collections['items'][0]['value']['claims'], claim_ids)
        self.assertNotIn('claims', collections['items'][0])

        tx = await self.collection_create('radjingles', claims=claim_ids, allow_duplicate_name=True)
        claim_id2 = self.get_claim_id(tx)
        self.assertItemCount(await self.daemon.jsonrpc_collection_list(), 2)
        # with clear_claims
        await self.collection_update(claim_id, clear_claims=True, claims=claim_ids[:2])
        collections = await self.out(self.daemon.jsonrpc_collection_list())
        self.assertEqual(len(collections['items']), 2)
        # plain listing does not resolve claims, so no canonical_url
        self.assertNotIn('canonical_url', collections['items'][0])

        resolved_collections = await self.out(self.daemon.jsonrpc_collection_list(resolve=True))
        self.assertIn('canonical_url', resolved_collections['items'][0])
        # with replace
        await self.collection_update(claim_id, replace=True, claims=claim_ids[::-1][:2], tags=['cool'])
        updated = await self.claim_search(claim_id=claim_id)
        self.assertEqual(updated[0]['value']['tags'], ['cool'])
        self.assertEqual(updated[0]['value']['claims'], claim_ids[::-1][:2])
        # replace=True drops fields not supplied (tags disappear below)
        await self.collection_update(claim_id, replace=True, claims=claim_ids[:4], languages=['en', 'pt-BR'])
        updated = await self.resolve(f'radjingles:{claim_id}')
        self.assertEqual(updated['value']['claims'], claim_ids[:4])
        self.assertNotIn('tags', updated['value'])
        self.assertEqual(updated['value']['languages'], ['en', 'pt-BR'])

        await self.collection_abandon(claim_id)
        self.assertItemCount(await self.daemon.jsonrpc_collection_list(), 1)

        # resolve_claims caps how many contained claims get resolved
        collections = await self.out(self.daemon.jsonrpc_collection_list(resolve_claims=2))
        self.assertEqual(len(collections['items'][0]['claims']), 2)

        collections = await self.out(self.daemon.jsonrpc_collection_list(resolve_claims=10))
        self.assertEqual(len(collections['items'][0]['claims']), 4)
        self.assertEqual(collections['items'][0]['claims'][0]['name'], 'stream-one')
        self.assertEqual(collections['items'][0]['claims'][1]['name'], 'stream-two')
        self.assertEqual(collections['items'][0]['claims'][2]['name'], 'stream-one')
        # the dangling 'beef' claim id resolves to None
        self.assertIsNone(collections['items'][0]['claims'][3])

        claims = await self.out(self.daemon.jsonrpc_claim_list())
        self.assertEqual(claims['items'][0]['name'], 'radjingles')
        self.assertEqual(claims['items'][1]['name'], 'stream-two')
        self.assertEqual(claims['items'][2]['name'], 'stream-one')

        claims = await self.out(self.daemon.jsonrpc_collection_resolve(claim_id2))
        self.assertEqual(claims['items'][0]['name'], 'stream-one')
        self.assertEqual(claims['items'][1]['name'], 'stream-two')
        self.assertEqual(claims['items'][2]['name'], 'stream-one')

        # paging past the end yields an empty items list
        claims = await self.out(self.daemon.jsonrpc_collection_resolve(claim_id2, page=10))
        self.assertEqual(claims['items'], [])
|