updated doc strings and fixed wallet server claimtrie implementation

Lex Berezhny 2019-05-18 19:54:13 -04:00
parent e56b72cf04
commit 021ca3efd2
6 changed files with 161 additions and 115 deletions

View file

@@ -1671,13 +1671,19 @@ class Daemon(metaclass=JSONRPCServerType):
Use --channel_id=<channel_id> to list all stream claims in a channel.
Arguments marked with "supports equality constraints" allow prepending the
value with an equality constraint such as '>', '>=', '<', '<=', e.g.
--height=">400000" would limit results to only claims above block height 400,000.
Usage:
claim_search [<name> | --name=<name>] [--claim_id=<claim_id>] [--txid=<txid> --nout=<nout>]
[--channel_id=<channel_id>] [--channel_name=<channel_name>] [--is_controlling]
[--order_by=<order_by>...]
[--published_since=<published_since>] [--released_since=<released_since>]
[--block_height=<block_height>] [--after_block_height=<after_block_height>]
[--before_block_height=<before_block_height>]
[--height=<height>] [--publish_time=<publish_time>] [--release_time=<release_time>]
[--amount=<amount>] [--effective_amount=<effective_amount>]
[--support_amount=<support_amount>] [--trending_group=<trending_group>]
[--trending_mixed=<trending_mixed>] [--trending_local=<trending_local>]
[--trending_global=<trending_global>] [--activation_height=<activation_height>]
[--any_tags=<any_tags>...] [--all_tags=<all_tags>...] [--not_tags=<not_tags>...]
[--any_languages=<any_languages>...] [--all_languages=<all_languages>...]
[--not_languages=<not_languages>...]
@@ -1695,18 +1701,46 @@ class Daemon(metaclass=JSONRPCServerType):
--is_controlling : (bool) limit to controlling claims for their respective name
--order_by=<order_by> : (str) field to order by, default is descending order, to do an
ascending order prepend ^ to the field name, e.g. '^amount'
available fields: 'name', 'block_height', 'release_time',
'publish_time', 'amount', 'effective_amount', 'support_amount',
'trending_amount'
--published_since=<published_since> : (int) limit to claims confirmed into blocks on or after
this UTC timestamp
--released_since=<released_since> : (int) limit to claims self-described as having been
available fields: 'name', 'height', 'release_time',
'publish_time', 'amount', 'effective_amount',
'support_amount', 'trending_group', 'trending_mixed',
'trending_local', 'trending_global', 'activation_height'
--height=<height> : (int) limit by block height (supports equality constraints)
--activation_height=<activation_height>: (int) height at which claim starts competing for name
(supports equality constraints)
--publish_time=<publish_time> : (int) limit by UTC timestamp of containing block (supports
equality constraints)
--release_time=<release_time> : (int) limit to claims self-described as having been
released to the public on or after this UTC
timestamp; when a claim does not provide
a release time the block time is used instead
--block_height=<block_height> : (int) limit to claims at specific block height
--after_block_height=<after_block_height> : (int) limit to claims after specific block height
--before_block_height=<before_block_height> : (int) limit to claims before specific block height
a release time the publish time is used instead
(supports equality constraints)
--amount=<amount> : (int) limit by claim value (supports equality constraints)
--support_amount=<support_amount>: (int) limit by supports and tips received (supports
equality constraints)
--effective_amount=<effective_amount>: (int) limit by total value (initial claim value plus
all tips and supports received); this amount is
blank until the claim has reached its activation height
(supports equality constraints)
--trending_group=<trending_group>: (int) group numbers 1 through 4 representing the
trending groups of the content: 4 means
content is trending globally and independently,
3 means content is not trending globally but is
trending independently (locally), 2 means it is
trending globally but not independently and 1
means it's not trending globally or locally
(supports equality constraints)
--trending_mixed=<trending_mixed>: (int) trending amount taken from the global or local
value depending on the trending group:
4 - global value, 3 - local value, 2 - global
value, 1 - local value (supports equality
constraints)
--trending_local=<trending_local>: (int) trending value calculated relative only to
the individual content's past history (supports
equality constraints)
--trending_global=<trending_global>: (int) trending value calculated relative to all
trending content globally (supports
equality constraints)
--any_tags=<any_tags> : (list) find claims containing any of the tags
--all_tags=<all_tags> : (list) find claims containing every tag
--not_tags=<not_tags> : (list) find claims not containing any of these tags
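
For context on the new parameters, here is a minimal sketch of calling claim_search with the equality-constraint syntax over the daemon's JSON-RPC API; the endpoint assumes the default lbrynet API port (5279) and the wrapper function is made up for this example:

import json
import urllib.request

def claim_search(**params):
    # Hypothetical wrapper: POST a JSON-RPC request to the local daemon.
    request = urllib.request.Request(
        "http://localhost:5279",
        data=json.dumps({"method": "claim_search", "params": params}).encode(),
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read())["result"]

# Claims above block 400,000 that are trending globally and independently (group 4),
# sorted by ascending amount.
result = claim_search(height=">400000", trending_group=4, order_by=["^amount"])
print(json.dumps(result, indent=2))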

View file

@@ -25,12 +25,10 @@ class Outputs:
'effective_amount': message.effective_amount,
'support_amount': message.support_amount,
'claims_in_channel': message.claims_in_channel,
'trending_daily': message.trending_daily,
'trending_day_one': message.trending_day_one,
'trending_day_two': message.trending_day_two,
'trending_weekly': message.trending_weekly,
'trending_week_one': message.trending_week_one,
'trending_week_two': message.trending_week_two,
'trending_group': message.trending_group,
'trending_mixed': message.trending_mixed,
'trending_local': message.trending_local,
'trending_global': message.trending_global,
}
try:
if txo.claim.is_channel:
@@ -98,12 +96,10 @@ class Outputs:
txo_message.claim.effective_amount = txo['effective_amount']
txo_message.claim.support_amount = txo['support_amount']
txo_message.claim.claims_in_channel = txo['claims_in_channel']
txo_message.claim.trending_daily = txo['trending_daily']
txo_message.claim.trending_day_one = txo['trending_day_one']
txo_message.claim.trending_day_two = txo['trending_day_two']
txo_message.claim.trending_weekly = txo['trending_weekly']
txo_message.claim.trending_week_one = txo['trending_week_one']
txo_message.claim.trending_week_two = txo['trending_week_two']
txo_message.claim.trending_group = txo['trending_group']
txo_message.claim.trending_mixed = txo['trending_mixed']
txo_message.claim.trending_local = txo['trending_local']
txo_message.claim.trending_global = txo['trending_global']
if txo['channel_txo_hash']:
channel = txo_message.claim.channel
channel.height = txo['channel_height']

View file

@@ -19,7 +19,7 @@ DESCRIPTOR = _descriptor.FileDescriptor(
package='pb',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x0cresult.proto\x12\x02pb\"B\n\x07Outputs\x12\x18\n\x04txos\x18\x01 \x03(\x0b\x32\n.pb.Output\x12\r\n\x05total\x18\x02 \x01(\r\x12\x0e\n\x06offset\x18\x03 \x01(\r\"{\n\x06Output\x12\x0f\n\x07tx_hash\x18\x01 \x01(\x0c\x12\x0c\n\x04nout\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x1e\n\x05\x63laim\x18\x07 \x01(\x0b\x32\r.pb.ClaimMetaH\x00\x12\x1a\n\x05\x65rror\x18\x0f \x01(\x0b\x32\t.pb.ErrorH\x00\x42\x06\n\x04meta\"\xc3\x02\n\tClaimMeta\x12\x1b\n\x07\x63hannel\x18\x01 \x01(\x0b\x32\n.pb.Output\x12\x16\n\x0eis_controlling\x18\x02 \x01(\x08\x12\x19\n\x11\x61\x63tivation_height\x18\x03 \x01(\r\x12\x18\n\x10\x65\x66\x66\x65\x63tive_amount\x18\x04 \x01(\x04\x12\x16\n\x0esupport_amount\x18\x05 \x01(\x04\x12\x19\n\x11\x63laims_in_channel\x18\x06 \x01(\r\x12\x16\n\x0etrending_daily\x18\x07 \x01(\x12\x12\x18\n\x10trending_day_one\x18\x08 \x01(\x04\x12\x18\n\x10trending_day_two\x18\t \x01(\x04\x12\x17\n\x0ftrending_weekly\x18\n \x01(\x12\x12\x19\n\x11trending_week_one\x18\x0b \x01(\x04\x12\x19\n\x11trending_week_two\x18\x0c \x01(\x04\"i\n\x05\x45rror\x12\x1c\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x0e.pb.Error.Code\x12\x0c\n\x04text\x18\x02 \x01(\t\"4\n\x04\x43ode\x12\x10\n\x0cUNKNOWN_CODE\x10\x00\x12\r\n\tNOT_FOUND\x10\x01\x12\x0b\n\x07INVALID\x10\x02\x62\x06proto3')
serialized_pb=_b('\n\x0cresult.proto\x12\x02pb\"B\n\x07Outputs\x12\x18\n\x04txos\x18\x01 \x03(\x0b\x32\n.pb.Output\x12\r\n\x05total\x18\x02 \x01(\r\x12\x0e\n\x06offset\x18\x03 \x01(\r\"{\n\x06Output\x12\x0f\n\x07tx_hash\x18\x01 \x01(\x0c\x12\x0c\n\x04nout\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x1e\n\x05\x63laim\x18\x07 \x01(\x0b\x32\r.pb.ClaimMetaH\x00\x12\x1a\n\x05\x65rror\x18\x0f \x01(\x0b\x32\t.pb.ErrorH\x00\x42\x06\n\x04meta\"\x89\x02\n\tClaimMeta\x12\x1b\n\x07\x63hannel\x18\x01 \x01(\x0b\x32\n.pb.Output\x12\x16\n\x0eis_controlling\x18\x02 \x01(\x08\x12\x19\n\x11\x61\x63tivation_height\x18\x03 \x01(\r\x12\x18\n\x10\x65\x66\x66\x65\x63tive_amount\x18\x04 \x01(\x04\x12\x16\n\x0esupport_amount\x18\x05 \x01(\x04\x12\x19\n\x11\x63laims_in_channel\x18\x06 \x01(\r\x12\x16\n\x0etrending_group\x18\x07 \x01(\r\x12\x16\n\x0etrending_mixed\x18\x08 \x01(\x12\x12\x16\n\x0etrending_local\x18\t \x01(\x12\x12\x17\n\x0ftrending_global\x18\n \x01(\x12\"i\n\x05\x45rror\x12\x1c\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x0e.pb.Error.Code\x12\x0c\n\x04text\x18\x02 \x01(\t\"4\n\x04\x43ode\x12\x10\n\x0cUNKNOWN_CODE\x10\x00\x12\r\n\tNOT_FOUND\x10\x01\x12\x0b\n\x07INVALID\x10\x02\x62\x06proto3')
)
@@ -45,8 +45,8 @@ _ERROR_CODE = _descriptor.EnumDescriptor(
],
containing_type=None,
serialized_options=None,
serialized_start=592,
serialized_end=644,
serialized_start=534,
serialized_end=586,
)
_sym_db.RegisterEnumDescriptor(_ERROR_CODE)
@@ -208,47 +208,33 @@ _CLAIMMETA = _descriptor.Descriptor(
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_daily', full_name='pb.ClaimMeta.trending_daily', index=6,
number=7, type=18, cpp_type=2, label=1,
name='trending_group', full_name='pb.ClaimMeta.trending_group', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_day_one', full_name='pb.ClaimMeta.trending_day_one', index=7,
number=8, type=4, cpp_type=4, label=1,
name='trending_mixed', full_name='pb.ClaimMeta.trending_mixed', index=7,
number=8, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_day_two', full_name='pb.ClaimMeta.trending_day_two', index=8,
number=9, type=4, cpp_type=4, label=1,
name='trending_local', full_name='pb.ClaimMeta.trending_local', index=8,
number=9, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_weekly', full_name='pb.ClaimMeta.trending_weekly', index=9,
name='trending_global', full_name='pb.ClaimMeta.trending_global', index=9,
number=10, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_week_one', full_name='pb.ClaimMeta.trending_week_one', index=10,
number=11, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trending_week_two', full_name='pb.ClaimMeta.trending_week_two', index=11,
number=12, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -262,7 +248,7 @@ _CLAIMMETA = _descriptor.Descriptor(
oneofs=[
],
serialized_start=214,
serialized_end=537,
serialized_end=479,
)
@@ -300,8 +286,8 @@ _ERROR = _descriptor.Descriptor(
extension_ranges=[],
oneofs=[
],
serialized_start=539,
serialized_end=644,
serialized_start=481,
serialized_end=586,
)
_OUTPUTS.fields_by_name['txos'].message_type = _OUTPUT

View file

@@ -56,8 +56,7 @@ def _apply_constraints_for_array_attributes(constraints, attr):
class SQLDB:
TRENDING_24_HOURS = 720
TRENDING_WEEK = TRENDING_24_HOURS * 7
DAY_BLOCKS = 720
PRAGMAS = """
pragma journal_mode=WAL;
@@ -79,13 +78,12 @@ class SQLDB:
amount integer not null,
effective_amount integer not null default 0,
support_amount integer not null default 0,
trending_daily integer not null default 0,
trending_day_one integer not null default 0,
trending_day_two integer not null default 0,
trending_weekly integer not null default 0,
trending_week_one integer not null default 0,
trending_week_two integer not null default 0
trending_group integer not null default 0,
trending_mixed integer not null default 0,
trending_local integer not null default 0,
trending_global integer not null default 0
);
create index if not exists claim_normalized_idx on claim (normalized);
create index if not exists claim_txo_hash_idx on claim (txo_hash);
create index if not exists claim_channel_hash_idx on claim (channel_hash);
@@ -93,8 +91,21 @@ class SQLDB:
create index if not exists claim_publish_time_idx on claim (publish_time);
create index if not exists claim_height_idx on claim (height);
create index if not exists claim_activation_height_idx on claim (activation_height);
create index if not exists claim_trending_daily_idx on claim (trending_daily);
create index if not exists claim_trending_weekly_idx on claim (trending_weekly);
create index if not exists claim_trending_group_idx on claim (trending_group);
create index if not exists claim_trending_mixed_idx on claim (trending_mixed);
create index if not exists claim_trending_local_idx on claim (trending_local);
create index if not exists claim_trending_global_idx on claim (trending_global);
"""
CREATE_TREND_TABLE = """
create table if not exists trend (
claim_hash bytes not null,
height integer not null,
amount integer not null,
primary key (claim_hash, height)
) without rowid;
create index if not exists trend_claim_hash_idx on trend (claim_hash);
"""
CREATE_SUPPORT_TABLE = """
@@ -132,6 +143,7 @@ class SQLDB:
CREATE_TABLES_QUERY = (
PRAGMAS +
CREATE_CLAIM_TABLE +
CREATE_TREND_TABLE +
CREATE_SUPPORT_TABLE +
CREATE_CLAIMTRIE_TABLE +
CREATE_TAG_TABLE
@@ -279,22 +291,19 @@ class SQLDB:
self.execute(*self._delete_sql(table, {'claim_hash__in': binary_claim_hashes}))
def split_inputs_into_claims_supports_and_other(self, txis):
txo_hashes = set(txi.txo_ref.hash for txi in txis)
claim_txo_hashes = set()
claims = {}
for claim in self.execute(*query(
"SELECT txo_hash, claim_hash, normalized FROM claim",
txo_hash__in=[sqlite3.Binary(txo_hash) for txo_hash in txo_hashes])):
claim_txo_hashes.add(claim[0])
claims[claim[1]] = claim[2]
txo_hashes -= set(claim_txo_hashes)
txo_hashes = {txi.txo_ref.hash for txi in txis}
claims = self.execute(*query(
"SELECT txo_hash, claim_hash, normalized FROM claim",
txo_hash__in=[sqlite3.Binary(txo_hash) for txo_hash in txo_hashes]
)).fetchall()
txo_hashes -= {r['txo_hash'] for r in claims}
supports = {}
if txo_hashes:
supports = dict(self.execute(*query(
supports = self.execute(*query(
"SELECT txo_hash, claim_hash FROM support",
txo_hash__in=[sqlite3.Binary(txo_hash) for txo_hash in txo_hashes]
)))
txo_hashes -= set(supports)
)).fetchall()
txo_hashes -= {r['txo_hash'] for r in supports}
return claims, supports, txo_hashes
def insert_supports(self, txos: Set[Output]):
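
The rewritten split_inputs_into_claims_supports_and_other above indexes result rows by column name (r['txo_hash']), which only works when the connection uses a name-aware row factory. A minimal standalone sketch of that assumption, with an illustrative in-memory table:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row  # rows become addressable by column name

conn.execute("create table claim (txo_hash blob, claim_hash blob, normalized text)")
conn.execute("insert into claim values (?, ?, ?)", (b"txo-1", b"claim-1", "example"))

rows = conn.execute("select txo_hash, claim_hash, normalized from claim").fetchall()
spent_txo_hashes = {r["txo_hash"] for r in rows}  # same access pattern as the diff
print(spent_txo_hashes)  # {b'txo-1'}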
@@ -320,6 +329,7 @@ class SQLDB:
))
def _update_trending_amount(self, height):
return
day_ago = height-self.TRENDING_24_HOURS
two_day_ago = height-self.TRENDING_24_HOURS*2
week_ago = height-self.TRENDING_WEEK
@@ -388,19 +398,24 @@ class SQLDB:
WHERE activation_height IS NULL
""")
def _perform_overtake(self, height, changed):
constraint = f"normalized IN ({','.join('?' for _ in changed)}) OR " if changed else ""
def _perform_overtake(self, height, changed_claim_hashes, deleted_names):
deleted_names_sql = claim_hashes_sql = ""
if changed_claim_hashes:
claim_hashes_sql = f"OR claim_hash IN ({','.join('?' for _ in changed_claim_hashes)})"
if deleted_names:
deleted_names_sql = f"OR normalized IN ({','.join('?' for _ in deleted_names)})"
overtakes = self.execute(f"""
SELECT winner.normalized, winner.claim_hash, claimtrie.claim_hash AS current_winner FROM (
SELECT normalized, claim_hash FROM claim
WHERE {constraint}
normalized IN (SELECT normalized FROM claim WHERE activation_height={height})
ORDER BY effective_amount, height, tx_position DESC
WHERE normalized IN (
SELECT normalized FROM claim WHERE activation_height={height} {claim_hashes_sql}
) {deleted_names_sql}
ORDER BY effective_amount ASC, height DESC, tx_position DESC
-- the order by is backwards, because GROUP BY picks last row
) AS winner LEFT JOIN claimtrie USING (normalized)
GROUP BY winner.normalized
HAVING current_winner IS NULL
OR current_winner <> winner.claim_hash
""", changed)
HAVING current_winner IS NULL OR current_winner <> winner.claim_hash
""", changed_claim_hashes+deleted_names)
for overtake in overtakes:
if overtake['current_winner']:
self.execute(
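
The "order by is backwards" comment in the overtake query relies on a SQLite implementation detail: when GROUP BY is combined with bare (non-aggregated) columns, the surviving values come from the last row encountered in each group, so sorting ascending makes the highest effective_amount win. A standalone sketch of that behavior (not guaranteed by the SQL standard; table and values here are illustrative):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    create table claim (normalized text, claim_hash text, effective_amount integer);
    insert into claim values ('foo', 'a', 5), ('foo', 'b', 9), ('foo', 'c', 7);
""")

# Ascending sort in the subquery, then GROUP BY keeps the last row it sees per group,
# i.e. the claim with the highest effective_amount.
winner = conn.execute("""
    select normalized, claim_hash from (
        select normalized, claim_hash from claim order by effective_amount asc
    ) group by normalized
""").fetchone()
print(winner)  # typically ('foo', 'b') on current SQLite builds
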
@@ -425,19 +440,19 @@ class SQLDB:
self.execute(f"DROP TABLE claimtrie{height-50}")
self.execute(f"CREATE TABLE claimtrie{height} AS SELECT * FROM claimtrie")
def update_claimtrie(self, height, changed_names, amount_affected_claim_hashes, timer):
def update_claimtrie(self, height, changed_claim_hashes, deleted_names, timer):
binary_claim_hashes = [
sqlite3.Binary(claim_hash) for claim_hash in amount_affected_claim_hashes
sqlite3.Binary(claim_hash) for claim_hash in changed_claim_hashes
]
r = timer.run
r(self._calculate_activation_height, height)
r(self._update_support_amount, binary_claim_hashes)
r(self._update_effective_amount, height, binary_claim_hashes)
r(self._perform_overtake, height, list(changed_names))
r(self._perform_overtake, height, binary_claim_hashes, list(deleted_names))
r(self._update_effective_amount, height)
r(self._perform_overtake, height, [])
r(self._perform_overtake, height, [], [])
if not self.main.first_sync:
r(self._update_trending_amount, height)
@@ -446,54 +461,46 @@ class SQLDB:
insert_claims = set()
update_claims = set()
delete_claim_hashes = set()
deleted_and_inserted_names = set()
amount_affected_claim_hashes = set()
insert_supports = set()
delete_supports = set()
delete_support_txo_hashes = set()
recalculate_claim_hashes = set() # added/deleted supports, added/updated claim
deleted_claim_names = set()
body_timer = timer.add_timer('body')
for position, (etx, txid) in enumerate(all_txs):
tx = timer.run(
Transaction, etx.serialize(), height=height, position=position
)
# Inputs
spent_claims, spent_supports, spent_other = timer.run(
self.split_inputs_into_claims_supports_and_other, tx.inputs
)
body_timer.start()
delete_claim_hashes.update(spent_claims.keys())
deleted_and_inserted_names.update(spent_claims.values())
amount_affected_claim_hashes.update(spent_supports.values())
delete_supports.update(spent_supports)
delete_claim_hashes.update({r['claim_hash'] for r in spent_claims})
delete_support_txo_hashes.update({r['txo_hash'] for r in spent_supports})
deleted_claim_names.update({r['normalized'] for r in spent_claims})
recalculate_claim_hashes.update({r['claim_hash'] for r in spent_supports})
# Outputs
for output in tx.outputs:
if output.is_support:
insert_supports.add(output)
amount_affected_claim_hashes.add(output.claim_hash)
recalculate_claim_hashes.add(output.claim_hash)
elif output.script.is_claim_name:
insert_claims.add(output)
try:
deleted_and_inserted_names.add(output.normalized_name)
except:
self.logger.exception(
f"Could not decode claim name for claim_id: {output.claim_id}, "
f"txid: {output.tx_ref.id}, nout: {output.position}.")
print(output.script.values['claim_name'])
continue
recalculate_claim_hashes.add(output.claim_hash)
elif output.script.is_update_claim:
claim_hash = output.claim_hash
if claim_hash in delete_claim_hashes:
delete_claim_hashes.remove(claim_hash)
update_claims.add(output)
amount_affected_claim_hashes.add(claim_hash)
recalculate_claim_hashes.add(output.claim_hash)
body_timer.stop()
r = timer.run
r(self.delete_claims, delete_claim_hashes)
r(self.delete_supports, delete_supports)
r(self.delete_supports, delete_support_txo_hashes)
r(self.insert_claims, insert_claims, header)
r(self.update_claims, update_claims, header)
r(self.insert_supports, insert_supports)
r(self.update_claimtrie, height,
deleted_and_inserted_names,
amount_affected_claim_hashes,
forward_timer=True)
r(self.update_claimtrie, height, recalculate_claim_hashes, deleted_claim_names, forward_timer=True)
def get_claims(self, cols, **constraints):
if 'order_by' in constraints:
@@ -598,8 +605,8 @@ class SQLDB:
claimtrie.claim_hash as is_controlling,
claim.claim_hash, claim.txo_hash, claim.height,
claim.activation_height, claim.effective_amount, claim.support_amount,
claim.trending_daily, claim.trending_day_one, claim.trending_day_two,
claim.trending_weekly, claim.trending_week_one, claim.trending_week_two,
claim.trending_group, claim.trending_mixed,
claim.trending_local, claim.trending_global,
CASE WHEN claim.is_channel=1 THEN (
SELECT COUNT(*) FROM claim as claim_in_channel
WHERE claim_in_channel.channel_hash=claim.claim_hash
@@ -609,10 +616,10 @@ class SQLDB:
)
INTEGER_PARAMS = {
'height', 'release_time', 'publish_time',
'height', 'activation_height', 'release_time', 'publish_time',
'amount', 'effective_amount', 'support_amount',
'trending_daily', 'trending_day_one', 'trending_day_two',
'trending_weekly', 'trending_week_one', 'trending_week_two'
'trending_group', 'trending_mixed',
'trending_local', 'trending_global',
}
SEARCH_PARAMS = {
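
INTEGER_PARAMS above lists the columns that accept the equality-constraint strings documented in claim_search. The helper that turns those strings into SQL belongs to the query builder and is not shown in this diff, so the following is only a hypothetical sketch of the idea (function name and regex are invented):

import re

def parse_constraint(field, raw):
    # Hypothetical: split a leading comparison operator off values like ">=400000".
    match = re.match(r"^(<=|>=|<|>)?(\d+)$", str(raw))
    if not match:
        raise ValueError(f"invalid constraint for {field}: {raw!r}")
    operator, value = match.group(1) or "=", int(match.group(2))
    return f"{field} {operator} ?", value

print(parse_constraint("height", ">400000"))   # ('height > ?', 400000)
print(parse_constraint("trending_group", 4))   # ('trending_group = ?', 4)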

View file

@@ -102,7 +102,7 @@ class ResolveCommand(CommandTestCase):
await self.assertResolvesToClaimId('@foo', claim_id3)
await self.support_create(claim_id2, '0.19')
await self.assertResolvesToClaimId('@foo', claim_id2)
await self.support_create(claim_id1, '0.19')
await self.support_create(claim_id1, '0.29')
await self.assertResolvesToClaimId('@foo', claim_id1)
async def test_advanced_resolve(self):

View file

@@ -181,7 +181,30 @@ class TestSQLDB(unittest.TestCase):
accepted=[]
)
def test_competing_claims_in_single_block_height_wins(self):
def test_competing_claims_subsequent_blocks_height_wins(self):
advance, state = self.advance, self.state
advance(13, [self.get_stream('Claim A', 10*COIN)])
state(
controlling=('Claim A', 10*COIN, 10*COIN, 13),
active=[],
accepted=[]
)
advance(14, [self.get_stream('Claim B', 10*COIN)])
state(
controlling=('Claim A', 10*COIN, 10*COIN, 13),
active=[('Claim B', 10*COIN, 10*COIN, 14)],
accepted=[]
)
advance(15, [self.get_stream('Claim C', 10*COIN)])
state(
controlling=('Claim A', 10*COIN, 10*COIN, 13),
active=[
('Claim B', 10*COIN, 10*COIN, 14),
('Claim C', 10*COIN, 10*COIN, 15)],
accepted=[]
)
def test_competing_claims_in_single_block_position_wins(self):
advance, state = self.advance, self.state
stream = self.get_stream('Claim A', 10*COIN)
stream2 = self.get_stream('Claim B', 10*COIN)