fixes from review

This commit is contained in:
Victor Shyba 2020-11-25 12:33:29 -03:00 committed by Lex Berezhny
parent f4a8be6c19
commit c484a8abf5
4 changed files with 19 additions and 22 deletions

View file

@@ -283,17 +283,23 @@ def update_channel_stats(blocks: Tuple[int, int], initial_sync: int, p: Progress
p.step(result.rowcount)
def select_reposts(channel_hashes, filter_type=0):
    """Build a query for reposts made by the given channels.

    Selects rows of (reposted_claim_hash, filter_type, channel_hash) for
    every claim whose channel is in *channel_hashes* and which is a repost
    (its reposted_claim_hash is not NULL).
    """
    in_channels = Claim.c.channel_hash.in_(channel_hashes)
    is_repost = Claim.c.reposted_claim_hash.isnot(None)
    return select(
        Claim.c.reposted_claim_hash, filter_type, Claim.c.channel_hash
    ).where(in_channels & is_repost)
# NOTE(review): this span is a unified-diff hunk whose +/- markers were lost in
# extraction, so the pre-change and post-change forms of several lines appear
# back to back (the paired `['claim_hash', ...]` lines below).  As shown it is
# not valid Python on its own — consult the original commit for the final form.
@event_emitter("blockchain.sync.claims.filters", "claim_filters")
def update_claim_filters(blocking_channel_hashes, filtering_channel_hashes, p: ProgressContext):
    # Inner helper: rows of (reposted_claim_hash, filter_type, channel_hash)
    # for every repost made by one of the given channels.
    def select_reposts(channel_hashes, filter_type=0):
        return select(
            Claim.c.reposted_claim_hash, filter_type, Claim.c.channel_hash).where(
            (Claim.c.channel_hash.in_(channel_hashes)) & (Claim.c.reposted_claim_hash.isnot(None)))
    # Rebuild the claim filter table from scratch on every run.
    p.ctx.execute(ClaimFilter.delete())
    # order matters: first we insert the blocked ones. Then the filtered ones.
    # If there is already a block in place, that takes priority because a block is just a harder filter
    p.ctx.execute(ClaimFilter.insert().from_select(
        # pre-change line (removed by this commit):
        ['claim_hash', 'filter_type', 'owner_channel_hash'], select_reposts(blocking_channel_hashes, 2)))
        # post-change line (added by this commit):
        ['claim_hash', 'filter_type', 'owner_channel_hash'], select_reposts(blocking_channel_hashes, 2))
    )
    # insert_or_ignore so an existing block entry wins over a filter entry.
    p.ctx.execute(p.ctx.insert_or_ignore(ClaimFilter).from_select(
        # pre-change line (removed by this commit):
        ['claim_hash', 'filter_type', 'owner_channel_hash'], select_reposts(filtering_channel_hashes, 1)))
        # post-change line (added by this commit):
        ['claim_hash', 'filter_type', 'owner_channel_hash'], select_reposts(filtering_channel_hashes, 1))
    )

View file

@@ -55,11 +55,11 @@ class BlockchainSync(Sync):
self.tx_hash_event = asyncio.Event()
self.mempool = []
self.filtering_channel_hashes = {
unhexlify(channel_id)[::-1] for channel_id in
os.getenv('FILTERING_CHANNEL_IDS', '').split(' ') if channel_id}
unhexlify(channel_id)[::-1] for channel_id in self.conf.spv_filtering_channel_ids
}
self.blocking_channel_hashes = {
unhexlify(channel_id)[::-1] for channel_id in
os.getenv('BLOCKING_CHANNEL_IDS', '').split(' ') if channel_id}
unhexlify(channel_id)[::-1] for channel_id in self.conf.spv_blocking_channel_ids
}
async def wait_for_chain_ready(self):
while True:

View file

@@ -631,6 +631,8 @@ class Config(CLIConfig):
"light client to synchronize with a full node.",
True
)
# Channels whose reposts mark claims to be filtered out of claim search
# results.  Values are channel claim ids (hex strings); default is empty.
spv_filtering_channel_ids = Strings("List of channel claim ids for filtering claim search results out.", [])
# Channels whose reposts mark claims to be blocked from resolve results.
spv_blocking_channel_ids = Strings("List of channel claim ids for blocking resolve results.", [])
# daemon
save_files = Toggle("Save downloaded files when calling `get` by default", True)

View file

@@ -312,17 +312,6 @@ class Database:
return await self.fetch_result(q.get_purchases, **constraints)
async def search_claims(self, **constraints) -> Result[Output]:
if 'channel' in constraints:
channel_url = constraints.pop('channel')
match = await self.resolve([channel_url])
if isinstance(match, dict):
for value in match.values():
if isinstance(value, Output):
constraints['channel_hash'] = value.claim_hash
else:
return Result([], 0)
else:
return Result([], 0)
#assert set(constraints).issubset(SEARCH_PARAMS), \
# f"Search query contains invalid arguments: {set(constraints).difference(SEARCH_PARAMS)}"
claims, total, censor = await self.run(q.search_claims, **constraints)