forked from LBRYCommunity/lbry-sdk
test cleanup and fixing
This commit is contained in: parent aa75b9bb25, commit 35c3ff1e30
7 changed files with 32 additions and 15 deletions
.github/workflows/main.yml (vendored, 2 lines changed)

@@ -49,7 +49,7 @@ jobs:
       - run: pip install -e .[test]
       - env:
           HOME: /tmp
-        run: coverage run -m unittest -vv tests.unit.test_conf tests.unit.blockchain tests.unit.test_event_controller tests.unit.crypto tests.unit.schema tests.unit.db
+        run: coverage run -m unittest -vv tests.unit.test_conf tests.unit.test_console tests.unit.test_event_controller tests/unit/blockchain/test* tests/unit/crypto/test* tests/unit/db/test* tests/unit/schema/test* tests/unit/service/test*
 #       run: coverage run -m unittest discover -vv tests.unit
       - env:
           COVERALLS_REPO_TOKEN: ${{ secrets.GITHUB_TOKEN }}

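The updated run line mixes dotted module names with shell globs over test file paths; the stdlib unittest CLI accepts both forms, and the shell expands the globs before unittest sees them. A minimal local sketch of the same kind of selection, assuming the tests/unit layout referenced in the workflow:

    # Sketch only: run the blockchain unit tests roughly as the CI glob
    # tests/unit/blockchain/test* would select them.
    import unittest

    suite = unittest.defaultTestLoader.discover("tests/unit/blockchain", pattern="test*.py")
    unittest.TextTestRunner(verbosity=2).run(suite)
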
@@ -54,6 +54,10 @@ class TXRefMutable(TXRef):
     def height(self):
         return self.tx.height

+    @property
+    def timestamp(self):
+        return self.tx.timestamp
+
     def reset(self):
         self._id = None
         self._hash = None

@@ -2,7 +2,7 @@ from sqlalchemy import text
 from sqlalchemy.future import select

 from ..query_context import context
-from ..tables import SCHEMA_VERSION, metadata, Version, Claim, Support, Block
+from ..tables import SCHEMA_VERSION, metadata, Version, Claim, Support, Block, TX


 def execute(sql):

@@ -30,7 +30,7 @@ def insert_block(block):


 def insert_transaction(block_hash, tx):
-    context().get_bulk_loader().add_transaction(block_hash, tx).flush()
+    context().get_bulk_loader().add_transaction(block_hash, tx).flush(TX)


 def check_version_and_create_tables():

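flush() now names the TX table. From this hunk alone, the bulk loader's flush appears to take the table whose inserted rows it should report; the class below is an assumed, simplified sketch of that shape, not the implementation in lbry.db.query_context.

    # Hypothetical bulk loader: add_* calls queue rows, flush(table) writes
    # everything queued and returns the row count for the table passed in.
    class BulkLoader:
        def __init__(self, connection):
            self.connection = connection
            self.queues = {}  # table name -> list of row dicts

        def add_transaction(self, block_hash, tx):
            self.queues.setdefault("tx", []).append(
                {"block_hash": block_hash, "tx_hash": tx.hash, "height": tx.height}
            )
            return self

        def flush(self, table):
            counted = 0
            for name, rows in self.queues.items():
                # a real loader would execute bulk INSERTs against self.connection here
                if name == table:
                    counted = len(rows)
                rows.clear()
            return counted
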
@@ -384,6 +384,7 @@ def rows_to_txos(rows: List[dict], include_tx=True) -> List[Output]:
         if row['tx_hash'] not in tx_cache:
             tx_cache[row['tx_hash']] = Transaction(
                 row['raw'], height=row['height'], position=row['tx_position'],
+                timestamp=row['timestamp'],
                 is_verified=bool(row['is_verified']),
             )
         txo = tx_cache[row['tx_hash']].outputs[row['txo_position']]

@@ -1,9 +1,10 @@
 from sqlalchemy.future import select

 from lbry.db.query_context import progress, Event
-from lbry.db.tables import TXI, TXO, Claim, Support
+from lbry.db.tables import TX, TXI, TXO, Claim, Support
 from .constants import TXO_TYPES, CLAIM_TYPE_CODES
 from .queries import (
+    BASE_SELECT_TXO_COLUMNS,
     rows_to_txos, where_unspent_txos,
     where_abandoned_supports,
     where_abandoned_claims

@@ -22,9 +23,9 @@ SUPPORT_DELETE_EVENT = Event.add("client.sync.supports.delete", "supports")
 def process_all_things_after_sync():
     with progress(SPENDS_UPDATE_EVENT) as p:
         p.start(2)
-        set_input_addresses(p.ctx)
-        p.step(1)
         update_spent_outputs(p.ctx)
         p.step(1)
+        set_input_addresses(p.ctx)
+        p.step(2)
     with progress(SUPPORT_DELETE_EVENT) as p:
         p.start(1)

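For context on the pattern used throughout this function: progress(EVENT) yields an object whose start(total) and step(done) calls report how far a sync stage has advanced, and the reordered body now runs update_spent_outputs before set_input_addresses. The helper below is a minimal stand-in assumed from how the diff uses it, not the real lbry.db.query_context.progress:

    from contextlib import contextmanager

    @contextmanager
    def progress(event):
        # Stand-in only: the real helper also exposes a query context as p.ctx
        # and records events; here start/step just print the counters.
        class _Progress:
            total = 0

            def start(self, total):
                self.total = total
                print(f"{event}: 0/{total}")

            def step(self, done):
                print(f"{event}: {done}/{self.total}")

        yield _Progress()
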
@@ -32,7 +33,11 @@ def process_all_things_after_sync():
         p.ctx.execute(sql)
     with progress(SUPPORT_INSERT_EVENT) as p:
         loader = p.ctx.get_bulk_loader()
-        sql = where_unspent_txos(TXO_TYPES['support'], missing_in_supports_table=True)
+        sql = (
+            select(*BASE_SELECT_TXO_COLUMNS)
+            .where(where_unspent_txos(TXO_TYPES['support'], missing_in_supports_table=True))
+            .select_from(TXO.join(TX))
+        )
         for support in rows_to_txos(p.ctx.fetchall(sql)):
             loader.add_support(support)
         loader.flush(Support)

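This hunk, and the two claims hunks that follow, replace a bare condition from where_unspent_txos with a full SQLAlchemy select that lists its columns and joins TXO to TX explicitly, so the fetched rows also carry transaction-level fields such as the raw bytes and timestamp that rows_to_txos reads. A self-contained sketch of the same select(...).where(...).select_from(join) pattern, using illustrative tables rather than the lbry schema:

    # Illustrative tables, not the lbry schema: the point is the query shape.
    from sqlalchemy import Column, Integer, LargeBinary, MetaData, Table, select

    metadata = MetaData()
    txo = Table(
        "txo", metadata,
        Column("tx_hash", LargeBinary),
        Column("position", Integer),
        Column("amount", Integer),
    )
    tx = Table(
        "tx", metadata,
        Column("tx_hash", LargeBinary),
        Column("height", Integer),
        Column("timestamp", Integer),
    )

    condition = txo.c.amount > 0  # stands in for where_unspent_txos(...)
    query = (
        select(txo.c.tx_hash, txo.c.position, tx.c.height, tx.c.timestamp)
        .where(condition)
        .select_from(txo.join(tx, txo.c.tx_hash == tx.c.tx_hash))
    )
    print(query)  # SELECT ... FROM txo JOIN tx ON txo.tx_hash = tx.tx_hash
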
@@ -42,13 +47,21 @@ def process_all_things_after_sync():
         p.ctx.execute(sql)
     with progress(CLAIMS_INSERT_EVENT) as p:
         loader = p.ctx.get_bulk_loader()
-        sql = where_unspent_txos(CLAIM_TYPE_CODES, missing_in_claims_table=True)
+        sql = (
+            select(*BASE_SELECT_TXO_COLUMNS)
+            .where(where_unspent_txos(CLAIM_TYPE_CODES, missing_in_claims_table=True))
+            .select_from(TXO.join(TX))
+        )
         for claim in rows_to_txos(p.ctx.fetchall(sql)):
-            loader.add_claim(claim)
+            loader.add_claim(claim, '', 0, 0, 0, 0, staked_support_amount=0, staked_support_count=0)
         loader.flush(Claim)
     with progress(CLAIMS_UPDATE_EVENT) as p:
         loader = p.ctx.get_bulk_loader()
-        sql = where_unspent_txos(CLAIM_TYPE_CODES, missing_or_stale_in_claims_table=True)
+        sql = (
+            select(*BASE_SELECT_TXO_COLUMNS)
+            .where(where_unspent_txos(CLAIM_TYPE_CODES, missing_or_stale_in_claims_table=True))
+            .select_from(TXO.join(TX))
+        )
         for claim in rows_to_txos(p.ctx.fetchall(sql)):
             loader.update_claim(claim)
         loader.flush(Claim)

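The same three-line select is now built in three places; a natural follow-up (not part of this commit) would be a small helper so the column list and the TXO to TX join live in one spot. BASE_SELECT_TXO_COLUMNS, TXO, TX, and where_unspent_txos are names the diff already uses; select_unspent_txo_rows itself is hypothetical:

    def select_unspent_txo_rows(txo_types, **constraints):
        # Hypothetical helper wrapping the pattern repeated above.
        return (
            select(*BASE_SELECT_TXO_COLUMNS)
            .where(where_unspent_txos(txo_types, **constraints))
            .select_from(TXO.join(TX))
        )

Call sites would then read sql = select_unspent_txo_rows(CLAIM_TYPE_CODES, missing_in_claims_table=True).
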
@@ -376,14 +376,14 @@ class UnitDBTestCase(AsyncioTestCase):
     async def get_txos(self):
         txos = []
         sql = (
-            "select txo_hash, txo.position, is_spent from txo join tx using (tx_hash) "
+            "select txo_hash, txo.position, spent_height from txo join tx using (tx_hash) "
             "order by tx.height, tx.position, txo.position"
         )
         for txo in await self.db.execute_fetchall(sql):
             txoid = hexlify(txo["txo_hash"][:32][::-1]).decode()
             txos.append((
                 f"{txoid}:{txo['position']}",
-                bool(txo['is_spent'])
+                bool(txo['spent_height'])
             ))
         return txos

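The test now reads spent_height instead of an is_spent flag; wrapping it in bool() preserves the old semantics as long as unspent outputs store 0 (or NULL). A tiny self-contained illustration with a toy schema, not the project's tables:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        create table tx (tx_hash blob primary key, height int, position int);
        create table txo (tx_hash blob, position int, spent_height int);
        insert into tx values (x'aa', 100, 0);
        insert into txo values (x'aa', 0, 0);    -- still unspent
        insert into txo values (x'aa', 1, 105);  -- spent at height 105
    """)
    rows = conn.execute(
        "select txo.position, spent_height from txo join tx using (tx_hash) "
        "order by tx.height, tx.position, txo.position"
    )
    for position, spent_height in rows:
        print(position, bool(spent_height))  # prints 0 False, then 1 True
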
@@ -6,6 +6,5 @@ from lbry.db.query_context import Event
 class TestDBEvents(TestCase):

     def test_enum(self):
-        self.assertEqual(Event.BLOCK_READ.value, 1)
-        self.assertEqual(Event.BLOCK_READ.label, "blockchain.sync.block.read")
-        self.assertEqual(Event(1).label, "blockchain.sync.block.read")
+        self.assertEqual(Event.get_by_id(1).name, "client.sync.claims.insert")
+        self.assertEqual(Event.get_by_name("client.sync.claims.insert").id, 1)

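The rewritten test treats Event as a registry addressed by numeric id and dotted name rather than an Enum with .value and .label, which matches the Event.add("client.sync.supports.delete", "supports") call visible in the sync hunk above. A minimal sketch of such a registry, assumed from the test rather than copied from lbry.db.query_context:

    class Event:
        # Assumed registry shape: ids are assigned in registration order and
        # names are the dotted labels passed to add().
        _by_id = {}
        _by_name = {}

        def __init__(self, id, name, unit):
            self.id, self.name, self.unit = id, name, unit

        @classmethod
        def add(cls, name, unit):
            event = cls(len(cls._by_id) + 1, name, unit)
            cls._by_id[event.id] = event
            cls._by_name[event.name] = event
            return event

        @classmethod
        def get_by_id(cls, id):
            return cls._by_id[id]

        @classmethod
        def get_by_name(cls, name):
            return cls._by_name[name]

Under that shape the new assertions also pin the registration order: whichever event receives id 1 must be "client.sync.claims.insert".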