fixes from review

parent f4a8be6c19
commit c484a8abf5

4 changed files with 19 additions and 22 deletions
@@ -283,17 +283,23 @@ def update_channel_stats(blocks: Tuple[int, int], initial_sync: int, p: ProgressContext
         p.step(result.rowcount)
 
 
+def select_reposts(channel_hashes, filter_type=0):
+    return (
+        select(Claim.c.reposted_claim_hash, filter_type, Claim.c.channel_hash).where(
+            (Claim.c.channel_hash.in_(channel_hashes)) &
+            (Claim.c.reposted_claim_hash.isnot(None))
+        )
+    )
+
+
 @event_emitter("blockchain.sync.claims.filters", "claim_filters")
 def update_claim_filters(blocking_channel_hashes, filtering_channel_hashes, p: ProgressContext):
-    def select_reposts(channel_hashes, filter_type=0):
-        return select(
-            Claim.c.reposted_claim_hash, filter_type, Claim.c.channel_hash).where(
-            (Claim.c.channel_hash.in_(channel_hashes)) & (Claim.c.reposted_claim_hash.isnot(None)))
-
     p.ctx.execute(ClaimFilter.delete())
     # order matters: first we insert the blocked ones. Then the filtered ones.
     # If there is already a block in place, that takes priority because a block is just a harder filter
     p.ctx.execute(ClaimFilter.insert().from_select(
-        ['claim_hash', 'filter_type', 'owner_channel_hash'], select_reposts(blocking_channel_hashes, 2)))
+        ['claim_hash', 'filter_type', 'owner_channel_hash'], select_reposts(blocking_channel_hashes, 2))
+    )
     p.ctx.execute(p.ctx.insert_or_ignore(ClaimFilter).from_select(
-        ['claim_hash', 'filter_type', 'owner_channel_hash'], select_reposts(filtering_channel_hashes, 1)))
+        ['claim_hash', 'filter_type', 'owner_channel_hash'], select_reposts(filtering_channel_hashes, 1))
+    )
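Note on the hunk above: `select_reposts` moves from a closure inside `update_claim_filters` to module level, and the insert order is what makes blocks win over filters. A minimal sketch of that priority rule, using plain sqlite3 with a stand-in table rather than the project's ClaimFilter schema:

    # Stand-in for ClaimFilter.insert() followed by insert_or_ignore():
    # blocked rows (filter_type=2) go in first; the later INSERT OR IGNORE
    # for filtered rows (filter_type=1) cannot downgrade an existing block.
    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("CREATE TABLE claim_filter (claim_hash BLOB PRIMARY KEY, filter_type INTEGER)")
    db.execute("INSERT INTO claim_filter VALUES (?, ?)", (b"claim", 2))
    db.execute("INSERT OR IGNORE INTO claim_filter VALUES (?, ?)", (b"claim", 1))
    print(db.execute("SELECT filter_type FROM claim_filter").fetchone())  # (2,)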
@@ -55,11 +55,11 @@ class BlockchainSync(Sync):
         self.tx_hash_event = asyncio.Event()
         self.mempool = []
         self.filtering_channel_hashes = {
-            unhexlify(channel_id)[::-1] for channel_id in
-            os.getenv('FILTERING_CHANNEL_IDS', '').split(' ') if channel_id}
+            unhexlify(channel_id)[::-1] for channel_id in self.conf.spv_filtering_channel_ids
+        }
         self.blocking_channel_hashes = {
-            unhexlify(channel_id)[::-1] for channel_id in
-            os.getenv('BLOCKING_CHANNEL_IDS', '').split(' ') if channel_id}
+            unhexlify(channel_id)[::-1] for channel_id in self.conf.spv_blocking_channel_ids
+        }
 
     async def wait_for_chain_ready(self):
         while True:
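Note: the constructor now reads the channel ids from config instead of raw environment variables. The `[::-1]` is there because claim ids are displayed as hex in reverse byte order relative to the raw hashes stored in the database. A quick illustration with a made-up claim id:

    from binascii import hexlify, unhexlify

    claim_id = "deadbeef" * 5                # hypothetical 20-byte claim id in hex
    claim_hash = unhexlify(claim_id)[::-1]   # raw hash, byte order reversed
    assert hexlify(claim_hash[::-1]).decode() == claim_id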
@@ -631,6 +631,8 @@ class Config(CLIConfig):
         "light client to synchronize with a full node.",
         True
     )
+    spv_filtering_channel_ids = Strings("List of channel claim ids for filtering claim search results out.", [])
+    spv_blocking_channel_ids = Strings("List of channel claim ids for blocking resolve results.", [])
 
     # daemon
     save_files = Toggle("Save downloaded files when calling `get` by default", True)
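Note: these two `Strings` settings replace the `FILTERING_CHANNEL_IDS`/`BLOCKING_CHANNEL_IDS` environment variables removed above. A hedged usage sketch, assuming `Config` can be instantiated directly and treating the import path and channel id as placeholders:

    from lbry.conf import Config  # assumed module path

    conf = Config()
    conf.spv_filtering_channel_ids = ["deadbeef" * 5]  # placeholder channel claim id
    conf.spv_blocking_channel_ids = []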
@@ -312,17 +312,6 @@ class Database:
         return await self.fetch_result(q.get_purchases, **constraints)
 
     async def search_claims(self, **constraints) -> Result[Output]:
-        if 'channel' in constraints:
-            channel_url = constraints.pop('channel')
-            match = await self.resolve([channel_url])
-            if isinstance(match, dict):
-                for value in match.values():
-                    if isinstance(value, Output):
-                        constraints['channel_hash'] = value.claim_hash
-                    else:
-                        return Result([], 0)
-            else:
-                return Result([], 0)
         #assert set(constraints).issubset(SEARCH_PARAMS), \
         #    f"Search query contains invalid arguments: {set(constraints).difference(SEARCH_PARAMS)}"
         claims, total, censor = await self.run(q.search_claims, **constraints)
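Note: `search_claims` no longer resolves a `channel` URL constraint into a `channel_hash`. If a caller still needs URL-based lookup, something like the removed logic has to live on the calling side; a rough sketch, assuming `resolve` returns a dict keyed by URL and that `Output` and `Result` are the types shown in the diff:

    async def search_in_channel(db, channel_url, **constraints):
        # resolve the channel first, then search by its claim hash
        match = (await db.resolve([channel_url])).get(channel_url)
        if not isinstance(match, Output):
            return Result([], 0)  # unresolved or errored channel: empty result
        return await db.search_claims(channel_hash=match.claim_hash, **constraints)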