2019-03-31 00:40:01 +01:00
|
|
|
import unittest
|
2019-05-24 05:55:57 +02:00
|
|
|
import ecdsa
|
|
|
|
import hashlib
|
2019-07-16 18:26:28 +02:00
|
|
|
import logging
|
2019-05-26 05:06:22 +02:00
|
|
|
from binascii import hexlify
|
2019-03-31 00:40:01 +01:00
|
|
|
from torba.client.constants import COIN, NULL_HASH32
|
|
|
|
|
2019-06-21 03:02:58 +02:00
|
|
|
from lbry.schema.claim import Claim
|
2019-07-11 19:29:26 +02:00
|
|
|
from lbry.wallet.server.db import reader, writer
|
2019-06-21 03:02:58 +02:00
|
|
|
from lbry.wallet.server.coin import LBCRegTest
|
2019-07-11 19:29:26 +02:00
|
|
|
from lbry.wallet.server.db.trending import TRENDING_WINDOW
|
|
|
|
from lbry.wallet.server.db.canonical import FindShortestID
|
2019-06-21 03:02:58 +02:00
|
|
|
from lbry.wallet.server.block_processor import Timer
|
|
|
|
from lbry.wallet.transaction import Transaction, Input, Output
|
2019-03-31 00:40:01 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_output(amount=COIN, pubkey_hash=NULL_HASH32):
    """Build a one-output transaction paying `amount` to `pubkey_hash` and return that output."""
    tx = Transaction()
    tx.add_outputs([Output.pay_pubkey_hash(amount, pubkey_hash)])
    return tx.outputs[0]
|
|
|
|
|
|
|
|
|
|
|
|
def get_input():
    """Create an input that spends a freshly minted default output."""
    spent_txo = get_output()
    return Input.spend(spent_txo)
|
|
|
|
|
|
|
|
|
|
|
|
def get_tx():
    """Return a transaction pre-populated with a single funding input."""
    funding = get_input()
    return Transaction().add_inputs([funding])
|
|
|
|
|
|
|
|
|
|
|
|
class OldWalletServerTransaction:
    """Thin wrapper giving a wallet Transaction the `serialize()` interface
    that the wallet-server block processor expects."""

    def __init__(self, tx):
        # Keep a reference to the wrapped transaction object.
        self.tx = tx

    def serialize(self):
        """Return the raw byte serialization of the wrapped transaction."""
        wrapped = self.tx
        return wrapped.raw
|
|
|
|
|
|
|
|
|
|
|
|
class TestSQLDB(unittest.TestCase):
    """Base harness: an in-memory wallet-server SQL database plus helpers for
    building claim/support/abandon transactions and asserting claimtrie state."""

    # Timeout (seconds) passed to the reader for query execution.
    query_timeout = 0.25

    def setUp(self):
        # Attributes read by writer.SQLDB, which is handed `self` as its "ledger".
        self.first_sync = False
        self.daemon_height = 1
        self.coin = LBCRegTest()
        # Shared-cache in-memory SQLite URI so writer and reader see one database.
        db_url = 'file:test_sqldb?mode=memory&cache=shared'
        self.sql = writer.SQLDB(self, db_url)
        self.addCleanup(self.sql.close)
        reader.initializer(logging.getLogger(__name__), db_url, 'regtest', self.query_timeout)
        self.addCleanup(reader.cleanup)
        self.timer = Timer('BlockProcessor')
        self.sql.open()
        # Simulated chain height; advanced by advance().
        self._current_height = 0
        # txo-hash -> Output map so SQL rows can be joined back to their claim metadata.
        self._txos = {}

    def _make_tx(self, output, txi=None):
        """Wrap `output` (plus optional spend `txi`) in a funded transaction.

        Returns ``(OldWalletServerTransaction, tx_hash)`` and records the
        output in ``self._txos`` for later lookup by hash.
        """
        tx = get_tx().add_outputs([output])
        if txi is not None:
            tx.add_inputs([txi])
        self._txos[output.ref.hash] = output
        return OldWalletServerTransaction(tx), tx.hash

    def _set_channel_key(self, channel, key):
        """Derive a deterministic SECP256k1 key from the 1-byte seed `key` and
        install it (private key, DER public key, regenerated script) on `channel`."""
        # key*32 expands the single byte to the 32-byte seed SigningKey requires.
        private_key = ecdsa.SigningKey.from_string(key*32, curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
        channel.private_key = private_key
        channel.claim.channel.public_key_bytes = private_key.get_verifying_key().to_der()
        # Rebuild the output script so it carries the updated claim bytes.
        channel.script.generate()

    def get_channel(self, title, amount, name='@foo', key=b'a'):
        """Create a channel claim transaction; returns (wrapped tx, tx hash)."""
        claim = Claim()
        claim.channel.title = title
        channel = Output.pay_claim_name_pubkey_hash(amount, name, claim, b'abc')
        self._set_channel_key(channel, key)
        return self._make_tx(channel)

    def get_channel_update(self, channel, amount, key=b'a'):
        """Create an update of `channel` (re-keyed with `key`), spending the original."""
        self._set_channel_key(channel, key)
        return self._make_tx(
            Output.pay_update_claim_pubkey_hash(
                amount, channel.claim_name, channel.claim_id, channel.claim, b'abc'
            ),
            Input.spend(channel)
        )

    def get_stream(self, title, amount, name='foo', channel=None):
        """Create a stream claim transaction, optionally signed into `channel`."""
        claim = Claim()
        claim.stream.title = title
        result = self._make_tx(Output.pay_claim_name_pubkey_hash(amount, name, claim, b'abc'))
        if channel:
            result[0].tx.outputs[0].sign(channel)
            # Force re-serialization so the signature is included in the raw tx.
            result[0].tx._reset()
        return result

    def get_stream_update(self, tx, amount):
        """Create an update of the stream claim in `tx` with a new `amount`."""
        # Round-trip through serialization to get a fresh, independent Output.
        claim = Transaction(tx[0].serialize()).outputs[0]
        return self._make_tx(
            Output.pay_update_claim_pubkey_hash(
                amount, claim.claim_name, claim.claim_id, claim.claim, b'abc'
            ),
            Input.spend(claim)
        )

    def get_abandon(self, tx):
        """Spend the claim in `tx` into a plain payment, abandoning it."""
        claim = Transaction(tx[0].serialize()).outputs[0]
        return self._make_tx(
            Output.pay_pubkey_hash(claim.amount, b'abc'),
            Input.spend(claim)
        )

    def get_support(self, tx, amount):
        """Create a support of `amount` for the claim contained in `tx`."""
        claim = Transaction(tx[0].serialize()).outputs[0]
        return self._make_tx(
            Output.pay_support_pubkey_hash(
                amount, claim.claim_name, claim.claim_id, b'abc'
            )
        )

    def get_controlling(self):
        """Return (title, amount, effective_amount, activation_height) for the
        controlling claim, or None when the claimtrie is empty.

        Note: returns on the first row, so this assumes a single name in play.
        """
        for claim in self.sql.execute("select claim.* from claimtrie natural join claim"):
            txo = self._txos[claim['txo_hash']]
            controlling = txo.claim.stream.title, claim['amount'], claim['effective_amount'], claim['activation_height']
            return controlling

    def get_active(self):
        """Return tuples for activated claims that are not the controlling one."""
        controlling = self.get_controlling()
        active = []
        for claim in self.sql.execute(
                f"select * from claim where activation_height <= {self._current_height}"):
            txo = self._txos[claim['txo_hash']]
            # Skip the controlling claim itself (matched by title; assumes unique titles).
            if controlling and controlling[0] == txo.claim.stream.title:
                continue
            active.append((txo.claim.stream.title, claim['amount'], claim['effective_amount'], claim['activation_height']))
        return active

    def get_accepted(self):
        """Return tuples for claims whose activation height is still in the future."""
        accepted = []
        for claim in self.sql.execute(
                f"select * from claim where activation_height > {self._current_height}"):
            txo = self._txos[claim['txo_hash']]
            accepted.append((txo.claim.stream.title, claim['amount'], claim['effective_amount'], claim['activation_height']))
        return accepted

    def advance(self, height, txs):
        """Apply `txs` at block `height` and return their first outputs."""
        self._current_height = height
        self.sql.advance_txs(height, txs, {'timestamp': 1}, self.daemon_height, self.timer)
        return [otx[0].tx.outputs[0] for otx in txs]

    def state(self, controlling=None, active=None, accepted=None):
        """Assert the full claimtrie state: controlling, active and accepted claims."""
        self.assertEqual(controlling, self.get_controlling())
        self.assertEqual(active or [], self.get_active())
        self.assertEqual(accepted or [], self.get_accepted())
|
|
|
|
|
2019-06-23 01:25:32 +02:00
|
|
|
|
|
|
|
class TestClaimtrie(TestSQLDB):
    """Claimtrie takeover, abandon, expiration and canonical-URL behavior."""

    def test_example_from_spec(self):
        """Reproduce the claim-activation worked example from the LBRY spec."""
        # https://spec.lbry.com/#claim-activation-example
        advance, state = self.advance, self.state
        stream = self.get_stream('Claim A', 10*COIN)
        advance(13, [stream])
        state(
            controlling=('Claim A', 10*COIN, 10*COIN, 13),
            active=[],
            accepted=[]
        )
        # Claim B arrives late, so it only *accepts* with a delayed activation (1031).
        advance(1001, [self.get_stream('Claim B', 20*COIN)])
        state(
            controlling=('Claim A', 10*COIN, 10*COIN, 13),
            active=[],
            accepted=[('Claim B', 20*COIN, 0, 1031)]
        )
        # Supporting A raises its effective amount to 24 COIN.
        advance(1010, [self.get_support(stream, 14*COIN)])
        state(
            controlling=('Claim A', 10*COIN, 24*COIN, 13),
            active=[],
            accepted=[('Claim B', 20*COIN, 0, 1031)]
        )
        advance(1020, [self.get_stream('Claim C', 50*COIN)])
        state(
            controlling=('Claim A', 10*COIN, 24*COIN, 13),
            active=[],
            accepted=[
                ('Claim B', 20*COIN, 0, 1031),
                ('Claim C', 50*COIN, 0, 1051)]
        )
        # B activates but 20 < 24, so A keeps control.
        advance(1031, [])
        state(
            controlling=('Claim A', 10*COIN, 24*COIN, 13),
            active=[('Claim B', 20*COIN, 20*COIN, 1031)],
            accepted=[('Claim C', 50*COIN, 0, 1051)]
        )
        advance(1040, [self.get_stream('Claim D', 300*COIN)])
        state(
            controlling=('Claim A', 10*COIN, 24*COIN, 13),
            active=[('Claim B', 20*COIN, 20*COIN, 1031)],
            accepted=[
                ('Claim C', 50*COIN, 0, 1051),
                ('Claim D', 300*COIN, 0, 1072)]
        )
        # C and D both activate at 1051; D's 300 COIN takes over.
        advance(1051, [])
        state(
            controlling=('Claim D', 300*COIN, 300*COIN, 1051),
            active=[
                ('Claim A', 10*COIN, 24*COIN, 13),
                ('Claim B', 20*COIN, 20*COIN, 1031),
                ('Claim C', 50*COIN, 50*COIN, 1051)],
            accepted=[]
        )
        # beyond example
        # Updating A to 290 COIN (plus its 14 COIN support -> 304) retakes control,
        # keeping A's original activation height of 13.
        advance(1052, [self.get_stream_update(stream, 290*COIN)])
        state(
            controlling=('Claim A', 290*COIN, 304*COIN, 13),
            active=[
                ('Claim B', 20*COIN, 20*COIN, 1031),
                ('Claim C', 50*COIN, 50*COIN, 1051),
                ('Claim D', 300*COIN, 300*COIN, 1051),
            ],
            accepted=[]
        )

    def test_competing_claims_subsequent_blocks_height_wins(self):
        """With equal amounts, the earliest-activated claim stays controlling."""
        advance, state = self.advance, self.state
        advance(13, [self.get_stream('Claim A', 10*COIN)])
        state(
            controlling=('Claim A', 10*COIN, 10*COIN, 13),
            active=[],
            accepted=[]
        )
        advance(14, [self.get_stream('Claim B', 10*COIN)])
        state(
            controlling=('Claim A', 10*COIN, 10*COIN, 13),
            active=[('Claim B', 10*COIN, 10*COIN, 14)],
            accepted=[]
        )
        advance(15, [self.get_stream('Claim C', 10*COIN)])
        state(
            controlling=('Claim A', 10*COIN, 10*COIN, 13),
            active=[
                ('Claim B', 10*COIN, 10*COIN, 14),
                ('Claim C', 10*COIN, 10*COIN, 15)],
            accepted=[]
        )

    def test_competing_claims_in_single_block_position_wins(self):
        """Same block, same amount: position in the block breaks the tie."""
        advance, state = self.advance, self.state
        stream = self.get_stream('Claim A', 10*COIN)
        stream2 = self.get_stream('Claim B', 10*COIN)
        advance(13, [stream, stream2])
        state(
            controlling=('Claim A', 10*COIN, 10*COIN, 13),
            active=[('Claim B', 10*COIN, 10*COIN, 13)],
            accepted=[]
        )

    def test_competing_claims_in_single_block_effective_amount_wins(self):
        """Same block: the larger effective amount wins regardless of position."""
        advance, state = self.advance, self.state
        stream = self.get_stream('Claim A', 10*COIN)
        stream2 = self.get_stream('Claim B', 11*COIN)
        advance(13, [stream, stream2])
        state(
            controlling=('Claim B', 11*COIN, 11*COIN, 13),
            active=[('Claim A', 10*COIN, 10*COIN, 13)],
            accepted=[]
        )

    def test_winning_claim_deleted(self):
        """Abandoning the controlling claim promotes the remaining active claim."""
        advance, state = self.advance, self.state
        stream = self.get_stream('Claim A', 10*COIN)
        stream2 = self.get_stream('Claim B', 11*COIN)
        advance(13, [stream, stream2])
        state(
            controlling=('Claim B', 11*COIN, 11*COIN, 13),
            active=[('Claim A', 10*COIN, 10*COIN, 13)],
            accepted=[]
        )
        advance(14, [self.get_abandon(stream2)])
        state(
            controlling=('Claim A', 10*COIN, 10*COIN, 13),
            active=[],
            accepted=[]
        )

    def test_winning_claim_deleted_and_new_claim_becomes_winner(self):
        """Abandon of the winner plus a bigger new claim in one block: new claim wins."""
        advance, state = self.advance, self.state
        stream = self.get_stream('Claim A', 10*COIN)
        stream2 = self.get_stream('Claim B', 11*COIN)
        advance(13, [stream, stream2])
        state(
            controlling=('Claim B', 11*COIN, 11*COIN, 13),
            active=[('Claim A', 10*COIN, 10*COIN, 13)],
            accepted=[]
        )
        advance(15, [self.get_abandon(stream2), self.get_stream('Claim C', 12*COIN)])
        state(
            controlling=('Claim C', 12*COIN, 12*COIN, 15),
            active=[('Claim A', 10*COIN, 10*COIN, 13)],
            accepted=[]
        )

    def test_winning_claim_expires_and_another_takes_over(self):
        """Expiration of the controlling claim hands control to the runner-up."""
        advance, state = self.advance, self.state
        advance(10, [self.get_stream('Claim A', 11*COIN)])
        advance(20, [self.get_stream('Claim B', 10*COIN)])
        state(
            controlling=('Claim A', 11*COIN, 11*COIN, 10),
            active=[('Claim B', 10*COIN, 10*COIN, 20)],
            accepted=[]
        )
        # NOTE(review): 262984 = 10 + 262974, so A (height 10) has presumably
        # expired here while B (height 20) has not — confirm expiration window.
        advance(262984, [])
        state(
            controlling=('Claim B', 10*COIN, 10*COIN, 20),
            active=[],
            accepted=[]
        )
        # Ten blocks later B expires too and the trie is empty.
        advance(262994, [])
        state(
            controlling=None,
            active=[],
            accepted=[]
        )

    @staticmethod
    def _get_x_with_claim_id_prefix(getter, prefix, cached_iteration=None, **kwargs):
        """Brute-force `getter` until it yields a claim whose id starts with `prefix`.

        `cached_iteration` pins the known-good iteration so reruns are O(1);
        raises ValueError if the cache is stale or no match is found in 100 tries.
        """
        iterations = cached_iteration+1 if cached_iteration else 100
        for i in range(cached_iteration or 1, iterations):
            stream = getter(f'claim #{i}', COIN, **kwargs)
            if stream[0].tx.outputs[0].claim_id.startswith(prefix):
                # NOTE(review): short-circuit `and` used purely for the side-effect
                # print when no cached iteration was supplied.
                cached_iteration is None and print(f'Found "{prefix}" in {i} iterations.')
                return stream
        if cached_iteration:
            raise ValueError(f'Failed to find "{prefix}" at cached iteration, run with None to find iteration.')
        raise ValueError(f'Failed to find "{prefix}" in {iterations} iterations, try different values.')

    def get_channel_with_claim_id_prefix(self, prefix, cached_iteration=None, **kwargs):
        """Create a channel whose claim id starts with `prefix`."""
        return self._get_x_with_claim_id_prefix(self.get_channel, prefix, cached_iteration, **kwargs)

    def get_stream_with_claim_id_prefix(self, prefix, cached_iteration=None, **kwargs):
        """Create a stream whose claim id starts with `prefix`."""
        return self._get_x_with_claim_id_prefix(self.get_stream, prefix, cached_iteration, **kwargs)

    def test_canonical_url_and_channel_validation(self):
        """short_url/canonical_url generation and signature revalidation on
        channel key change, key restoration, and channel deletion."""
        advance = self.advance

        # Two channels sharing the name '@foo'; ids start with 'a' and 'ab'.
        tx_chan_a = self.get_channel_with_claim_id_prefix('a', 1, key=b'c')
        tx_chan_ab = self.get_channel_with_claim_id_prefix('ab', 72, key=b'c')
        txo_chan_a = tx_chan_a[0].tx.outputs[0]
        advance(1, [tx_chan_a])
        advance(2, [tx_chan_ab])
        r_ab, r_a = reader._search(order_by=['creation_height'], limit=2)
        self.assertEqual("@foo#a", r_a['short_url'])
        self.assertEqual("@foo#ab", r_ab['short_url'])
        self.assertIsNone(r_a['canonical_url'])
        self.assertIsNone(r_ab['canonical_url'])
        self.assertEqual(0, r_a['claims_in_channel'])
        self.assertEqual(0, r_ab['claims_in_channel'])

        # Unsigned streams named 'foo' with progressively longer shared prefixes.
        tx_a = self.get_stream_with_claim_id_prefix('a', 2)
        tx_ab = self.get_stream_with_claim_id_prefix('ab', 42)
        tx_abc = self.get_stream_with_claim_id_prefix('abc', 65)
        advance(3, [tx_a])
        advance(4, [tx_ab, tx_abc])
        r_abc, r_ab, r_a = reader._search(order_by=['creation_height', 'tx_position'], limit=3)
        self.assertEqual("foo#a", r_a['short_url'])
        self.assertEqual("foo#ab", r_ab['short_url'])
        self.assertEqual("foo#abc", r_abc['short_url'])
        self.assertIsNone(r_a['canonical_url'])
        self.assertIsNone(r_ab['canonical_url'])
        self.assertIsNone(r_abc['canonical_url'])

        # Streams signed into channel @foo#a get canonical URLs.
        tx_a2 = self.get_stream_with_claim_id_prefix('a', 7, channel=txo_chan_a)
        tx_ab2 = self.get_stream_with_claim_id_prefix('ab', 23, channel=txo_chan_a)
        a2_claim_id = tx_a2[0].tx.outputs[0].claim_id
        ab2_claim_id = tx_ab2[0].tx.outputs[0].claim_id
        advance(6, [tx_a2])
        advance(7, [tx_ab2])
        r_ab2, r_a2 = reader._search(order_by=['creation_height'], limit=2)
        self.assertEqual(f"foo#{a2_claim_id[:2]}", r_a2['short_url'])
        self.assertEqual(f"foo#{ab2_claim_id[:4]}", r_ab2['short_url'])
        self.assertEqual("@foo#a/foo#a", r_a2['canonical_url'])
        self.assertEqual("@foo#a/foo#ab", r_ab2['canonical_url'])
        self.assertEqual(2, reader._search(claim_id=txo_chan_a.claim_id, limit=1)[0]['claims_in_channel'])

        # change channel public key, invaliding stream claim signatures
        advance(8, [self.get_channel_update(txo_chan_a, COIN, key=b'a')])
        r_ab2, r_a2 = reader._search(order_by=['creation_height'], limit=2)
        self.assertEqual(f"foo#{a2_claim_id[:2]}", r_a2['short_url'])
        self.assertEqual(f"foo#{ab2_claim_id[:4]}", r_ab2['short_url'])
        self.assertIsNone(r_a2['canonical_url'])
        self.assertIsNone(r_ab2['canonical_url'])
        self.assertEqual(0, reader._search(claim_id=txo_chan_a.claim_id, limit=1)[0]['claims_in_channel'])

        # reinstate previous channel public key (previous stream claim signatures become valid again)
        channel_update = self.get_channel_update(txo_chan_a, COIN, key=b'c')
        advance(9, [channel_update])
        r_ab2, r_a2 = reader._search(order_by=['creation_height'], limit=2)
        self.assertEqual(f"foo#{a2_claim_id[:2]}", r_a2['short_url'])
        self.assertEqual(f"foo#{ab2_claim_id[:4]}", r_ab2['short_url'])
        self.assertEqual("@foo#a/foo#a", r_a2['canonical_url'])
        self.assertEqual("@foo#a/foo#ab", r_ab2['canonical_url'])
        self.assertEqual(2, reader._search(claim_id=txo_chan_a.claim_id, limit=1)[0]['claims_in_channel'])

        # claim abandon updates claims_in_channel
        advance(10, [self.get_abandon(tx_ab2)])
        self.assertEqual(1, reader._search(claim_id=txo_chan_a.claim_id, limit=1)[0]['claims_in_channel'])

        # delete channel, invaliding stream claim signatures
        advance(11, [self.get_abandon(channel_update)])
        r_a2, = reader._search(order_by=['creation_height'], limit=1)
        self.assertEqual(f"foo#{a2_claim_id[:2]}", r_a2['short_url'])
        self.assertIsNone(r_a2['canonical_url'])

    def test_canonical_find_shortest_id(self):
        """FindShortestID grows the '#' suffix only as colliding ids require."""
        new_hash = 'abcdef0123456789beef'
        other0 = '1bcdef0123456789beef'
        other1 = 'ab1def0123456789beef'
        other2 = 'abc1ef0123456789beef'
        other3 = 'abcdef0123456789bee1'
        f = FindShortestID()
        f.step(other0, new_hash)
        self.assertEqual('#a', f.finalize())
        f.step(other1, new_hash)
        self.assertEqual('#abc', f.finalize())
        f.step(other2, new_hash)
        self.assertEqual('#abcd', f.finalize())
        # Collision only at the last character forces the full id.
        f.step(other3, new_hash)
        self.assertEqual('#abcdef0123456789beef', f.finalize())
|
2019-06-23 01:25:32 +02:00
|
|
|
|
|
|
|
|
|
|
|
class TestTrending(TestSQLDB):
    """Trending score computation over successive TRENDING_WINDOW periods."""

    def test_trending(self):
        """Five claims with different support patterns produce the expected
        local/global/group/mixed trending scores, in descending local order."""
        advance, state = self.advance, self.state
        no_trend = self.get_stream('Claim A', COIN)
        downwards = self.get_stream('Claim B', COIN)
        up_small = self.get_stream('Claim C', COIN)
        up_medium = self.get_stream('Claim D', COIN)
        up_biggly = self.get_stream('Claim E', COIN)
        claims = advance(1, [up_biggly, up_medium, up_small, no_trend, downwards])
        # One support per claim per trending window; amounts shrink for
        # `downwards`, grow slightly for `up_small`, and spike in the final
        # window for `up_medium` (x2) and `up_biggly` (x3).
        for window in range(1, 8):
            advance(TRENDING_WINDOW * window, [
                self.get_support(downwards, (20-window)*COIN),
                self.get_support(up_small, int(20+(window/10)*COIN)),
                self.get_support(up_medium, (20+(window*(2 if window == 7 else 1)))*COIN),
                self.get_support(up_biggly, (20+(window*(3 if window == 7 else 1)))*COIN),
            ])
        results = reader._search(order_by=['trending_local'])
        # Search order by trending_local must match creation order E, D, C, A, B;
        # claim_hash is stored little-endian, hence the [::-1] before hexlify.
        self.assertEqual([c.claim_id for c in claims], [hexlify(c['claim_hash'][::-1]).decode() for c in results])
        self.assertEqual([10, 6, 2, 0, -2], [int(c['trending_local']) for c in results])
        self.assertEqual([53, 38, -32, 0, -6], [int(c['trending_global']) for c in results])
        self.assertEqual([4, 4, 2, 0, 1], [int(c['trending_group']) for c in results])
        self.assertEqual([53, 38, 2, 0, -6], [int(c['trending_mixed']) for c in results])
|