Lex Berezhny 2020-06-21 20:14:14 -04:00
parent 4810ff5f94
commit a3ef8d7411
6 changed files with 43 additions and 42 deletions

View file

@@ -175,6 +175,28 @@ class BlockchainDB:
         return await self.run_in_executor(self.sync_get_claim_metadata_count, start_height, end_height)
 
     def sync_get_claim_metadata(self, start_height: int, end_height: int) -> List[dict]:
+        sql = """
+            SELECT
+                name, claimID, activationHeight, expirationHeight,
+                (SELECT
+                    CASE WHEN takeover.claimID = claim.claimID THEN takeover.height END
+                    FROM takeover WHERE takeover.name = claim.name
+                    ORDER BY height DESC LIMIT 1
+                ) AS takeoverHeight,
+                (SELECT CASE WHEN takeover.claimID = claim.claimID THEN 1 ELSE 0 END
+                    FROM takeover WHERE takeover.name = claim.name
+                    ORDER BY height DESC LIMIT 1
+                ) AS isControlling,
+                (SELECT find_shortest_id(c.claimid, claim.claimid) FROM claim AS c
+                    WHERE
+                        c.nodename = claim.nodename AND
+                        c.originalheight <= claim.originalheight AND
+                        c.claimid != claim.claimid
+                ) AS shortestID
+            FROM claim
+            WHERE originalHeight BETWEEN ? AND ?
+            ORDER BY originalHeight, claimid
+        """, (start_height, end_height)
         return [{
             "name": r["name"],
             "claim_hash_": r["claimID"],
@@ -184,30 +206,7 @@ class BlockchainDB:
             "is_controlling": r["isControlling"],
             "short_url": f'{normalize_name(r["name"].decode())}#{r["shortestID"] or r["claimID"][::-1].hex()[0]}',
             "short_url_": f'{normalize_name(r["name"].decode())}#{r["shortestID"] or r["claimID"][::-1].hex()[0]}',
-        } for r in self.sync_execute_fetchall(
-            """
-            SELECT
-                name, claimID, activationHeight, expirationHeight,
-                (SELECT
-                    CASE WHEN takeover.claimID = claim.claimID THEN takeover.height END
-                    FROM takeover WHERE takeover.name = claim.name
-                    ORDER BY height DESC LIMIT 1
-                ) AS takeoverHeight,
-                (SELECT CASE WHEN takeover.claimID = claim.claimID THEN 1 ELSE 0 END
-                    FROM takeover WHERE takeover.name = claim.name
-                    ORDER BY height DESC LIMIT 1
-                ) AS isControlling,
-                (SELECT find_shortest_id(c.claimid, claim.claimid) FROM claim AS c
-                    WHERE
-                        c.nodename = claim.nodename AND
-                        c.originalheight <= claim.originalheight AND
-                        c.claimid != claim.claimid
-                ) AS shortestID
-            FROM claim
-            WHERE originalHeight BETWEEN ? AND ?
-            ORDER BY originalHeight, claimid
-            """, (start_height, end_height)
-        )]
+        } for r in self.sync_execute_fetchall(*sql)]
 
     async def get_claim_metadata(self, start_height: int, end_height: int) -> List[dict]:
         return await self.run_in_executor(self.sync_get_claim_metadata, start_height, end_height)
@@ -220,18 +219,16 @@ class BlockchainDB:
         return await self.run_in_executor(self.sync_get_support_metadata_count, start_height, end_height)
 
     def sync_get_support_metadata(self, start_height: int, end_height: int) -> List[dict]:
+        sql = """
+            SELECT name, txid, txn, activationHeight, expirationHeight
+            FROM support WHERE blockHeight BETWEEN ? AND ?
+        """, (start_height, end_height)
         return [{
             "name": r['name'],
             "txo_hash_pk": r['txID'] + BCDataStream.uint32.pack(r['txN']),
             "activation_height": r['activationHeight'],
             "expiration_height": r['expirationHeight'],
-        } for r in self.sync_execute_fetchall(
-            """
-            SELECT name, txid, txn, activationHeight, expirationHeight
-            FROM support WHERE blockHeight BETWEEN ? AND ?
-            """, (start_height, end_height)
-        )
-        ]
+        } for r in self.sync_execute_fetchall(*sql)]
 
     async def get_support_metadata(self, start_height: int, end_height: int) -> List[dict]:
         return await self.run_in_executor(self.sync_get_support_metadata, start_height, end_height)
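Note: in both methods the extracted sql value is a two-element tuple of (query string, bind parameters), so sync_execute_fetchall(*sql) unpacks into the same positional arguments as the old inline call. A minimal standalone sketch of the pattern against sqlite3, with a hypothetical db_path and claim table (not the BlockchainDB API itself):

    import sqlite3

    def fetch_claims(db_path: str, start_height: int, end_height: int):
        # the query text and its bind parameters travel together as one tuple
        sql = """
            SELECT name, claimID FROM claim
            WHERE originalHeight BETWEEN ? AND ?
        """, (start_height, end_height)
        conn = sqlite3.connect(db_path)
        try:
            # *sql unpacks to execute(query, params), same as writing them inline
            return conn.execute(*sql).fetchall()
        finally:
            conn.close()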

View file

@@ -1,3 +1,4 @@
+# pylint: disable=singleton-comparison
 import os
 import asyncio
 import logging
@@ -10,7 +11,7 @@ from sqlalchemy.future import select
 from lbry.event import BroadcastSubscription
 from lbry.service.base import Sync, BlockEvent
 from lbry.db import Database, queries, TXO_TYPES, CLAIM_TYPE_CODES
-from lbry.db.tables import Claim, Takeover, Support, TXO, TX, TXI, Block as BlockTable
+from lbry.db.tables import Claim, Takeover, Support, TXO, Block as BlockTable
 from lbry.db.query_context import progress, context, Event
 from lbry.db.queries import rows_to_txos
 from lbry.db.sync import (
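Note: the singleton-comparison disable is typically needed for SQLAlchemy filter expressions, where a column has to be compared to None with == so the comparison renders as IS NULL rather than being evaluated in Python; pylint flags that spelling by default. A minimal sketch under that assumption, and assuming the Claim table defines a nullable channel_hash column:

    from sqlalchemy.future import select
    from lbry.db.tables import Claim

    # pylint: disable=singleton-comparison
    # "== None" is intentional: SQLAlchemy renders it as "channel_hash IS NULL",
    # whereas "is None" would be evaluated in Python and always be False here.
    query = select(Claim.c.claim_hash).where(Claim.c.channel_hash == None)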

View file

@@ -6,13 +6,13 @@ from datetime import date
 from decimal import Decimal
 from binascii import unhexlify
 from operator import itemgetter
-from typing import Tuple, List, Dict, Optional, Union
+from typing import Tuple, List, Dict, Optional
 
 from sqlalchemy import union, func, text
 from sqlalchemy.future import select, Select
 
 from lbry.schema.tags import clean_tags
-from lbry.schema.result import Censor, Outputs
+from lbry.schema.result import Censor
 from lbry.schema.url import URL, normalize_name
 from lbry.error import ResolveCensoredError
 from lbry.blockchain.transaction import Transaction, Output, OutputScript, TXRefImmutable
@@ -20,7 +20,7 @@ from lbry.blockchain.transaction import Transaction, Output, OutputScript, TXRefImmutable
 from .utils import query, in_account_ids
 from .query_context import context
 from .constants import (
-    TXO_TYPES, CLAIM_TYPE_CODES, STREAM_TYPES, ATTRIBUTE_ARRAY_MAX_LENGTH,
+    TXO_TYPES, STREAM_TYPES, ATTRIBUTE_ARRAY_MAX_LENGTH,
     SEARCH_INTEGER_PARAMS, SEARCH_ORDER_FIELDS
 )
 from .tables import (
@@ -726,7 +726,7 @@ def search_claim_count(**constraints) -> int:
 
 
 def _get_referenced_rows(txo_rows: List[dict], censor_channels: List[bytes]):
-    censor = context().get_resolve_censor()
+    # censor = context().get_resolve_censor()
     repost_hashes = set(filter(None, map(itemgetter('reposted_claim_hash'), txo_rows)))
     channel_hashes = set(itertools.chain(
         filter(None, map(itemgetter('channel_hash'), txo_rows)),
@@ -735,12 +735,12 @@ def _get_referenced_rows(txo_rows: List[dict], censor_channels: List[bytes]):
 
     reposted_txos = []
     if repost_hashes:
-        reposted_txos = search_claims(censor, **{'claim.claim_hash__in': repost_hashes})
+        reposted_txos = search_claims(**{'claim.claim_hash__in': repost_hashes})
         channel_hashes |= set(filter(None, map(itemgetter('channel_hash'), reposted_txos)))
 
     channel_txos = []
     if channel_hashes:
-        channel_txos = search_claims(censor, **{'claim.claim_hash__in': channel_hashes})
+        channel_txos = search_claims(**{'claim.claim_hash__in': channel_hashes})
 
     # channels must come first for client side inflation to work properly
     return channel_txos + reposted_txos
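Note: search_claims is now called with constraint keyword arguments only; the censor positional argument is gone, and the local censor in _get_referenced_rows is left commented out rather than removed. A small sketch of the new call shape, assuming search_claims is exposed by lbry.db.queries (the hash value is a placeholder):

    from lbry.db.queries import search_claims

    # the constraint key contains a dot, so it cannot be written as a plain
    # keyword argument; it is passed through ** dict unpacking instead, and
    # the "__in" suffix presumably maps to a SQL IN (...) filter
    constraints = {'claim.claim_hash__in': {b'\x00' * 32}}
    reposted_txos = search_claims(**constraints)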

View file

@@ -7,7 +7,7 @@ from typing import Dict, List, Optional, Tuple
 from dataclasses import dataclass
 from contextvars import ContextVar
 
-from sqlalchemy import create_engine, inspect, bindparam, case
+from sqlalchemy import create_engine, inspect, bindparam
 from sqlalchemy.engine import Engine, Connection
 
 from lbry.event import EventQueuePublisher

View file

@@ -1,3 +1,4 @@
+# pylint: disable=attribute-defined-outside-init
 import os
 import sys
 import json
@@ -265,7 +266,8 @@ class UnitDBTestCase(AsyncioTestCase):
             timestamp=99, bits=1, nonce=1, txs=txs
         )
 
-    def coinbase(self):
+    @staticmethod
+    def coinbase():
         return (
             Transaction(height=0)
             .add_inputs([Input.create_coinbase()])
@@ -329,7 +331,8 @@ class UnitDBTestCase(AsyncioTestCase):
     def abandon_claim(self, txo):
         return self.tx(amount='0.01', txi=Input.spend(txo))
 
-    def _set_channel_key(self, channel, key):
+    @staticmethod
+    def _set_channel_key(channel, key):
         private_key = ecdsa.SigningKey.from_string(key*32, curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256)
         channel.private_key = private_key
         channel.claim.channel.public_key_bytes = private_key.get_verifying_key().to_der()
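Note: both helpers are converted to staticmethods, presumably because they never touch self; call sites do not change, since Python resolves a staticmethod through the instance as well. A minimal sketch of the pattern with hypothetical names:

    class Helpers:
        @staticmethod
        def double(x):
            # no access to self or cls, so a staticmethod is the natural fit
            return x * 2

    # both call styles work identically
    assert Helpers.double(2) == Helpers().double(2) == 4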

View file

@@ -16,7 +16,7 @@ ignore=words,schema,migrator,extras,ui,api.py
 max-parents=10
 max-args=10
 max-line-length=120
-good-names=T,t,n,i,j,k,x,y,s,f,d,h,c,e,op,db,tx,io,cachedproperty,log,id,r,iv,ts,l,it,fp,q,p
+good-names=T,t,n,i,j,k,x,y,s,f,d,h,c,e,op,db,tx,io,cachedproperty,log,id,r,iv,ts,l,it,fp,q,p,pk
 valid-metaclass-classmethod-first-arg=mcs
 disable=
     fixme,
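Note: adding pk to good-names lets a two-letter variable named pk pass pylint's invalid-name (C0103) check without a local disable. A tiny illustration, assuming pylint runs with this configuration:

    # accepted because "pk" is listed in good-names; otherwise pylint would
    # report C0103 (invalid-name) for this short module-level name
    pk = b'\x00' * 32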