forked from LBRYCommunity/lbry-sdk

delete sqlite fts

parent bf44befff6
commit dd412c0f50

3 changed files with 2 additions and 74 deletions

lbry/wallet/server/db
@@ -1,52 +0,0 @@
-from lbry.wallet.database import constraints_to_sql
-
-CREATE_FULL_TEXT_SEARCH = """
-create virtual table if not exists search using fts5(
-    claim_name, channel_name, title, description, author, tags,
-    content=claim, tokenize=porter
-);
-"""
-
-FTS_ORDER_BY = "bm25(search, 4.0, 8.0, 1.0, 0.5, 1.0, 0.5)"
-
-
-def fts_action_sql(claims=None, action='insert'):
-    select = {
-        'rowid': "claim.rowid",
-        'claim_name': "claim.normalized",
-        'channel_name': "channel.normalized",
-        'title': "claim.title",
-        'description': "claim.description",
-        'author': "claim.author",
-        'tags': "(select group_concat(tag, ' ') from tag where tag.claim_hash=claim.claim_hash)"
-    }
-    if action == 'delete':
-        select['search'] = '"delete"'
-
-    where, values = "", {}
-    if claims:
-        where, values = constraints_to_sql({'claim.claim_hash__in': claims})
-        where = 'WHERE '+where
-
-    return f"""
-        INSERT INTO search ({','.join(select.keys())})
-        SELECT {','.join(select.values())} FROM claim
-        LEFT JOIN claim as channel ON (claim.channel_hash=channel.claim_hash) {where}
-    """, values
-
-
-def update_full_text_search(action, outputs, db, is_first_sync):
-    if is_first_sync:
-        return
-    if not outputs:
-        return
-    if action in ("before-delete", "before-update"):
-        db.execute(*fts_action_sql(outputs, 'delete'))
-    elif action in ("after-insert", "after-update"):
-        db.execute(*fts_action_sql(outputs, 'insert'))
-    else:
-        raise ValueError(f"Invalid action for updating full text search: '{action}'")
-
-
-def first_sync_finished(db):
-    db.execute(*fts_action_sql())
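Note on the feature being removed: the module deleted above maintained an SQLite FTS5 index over claim metadata, declared as an external-content table (content=claim) so the indexed text lived in the claim table itself, and ranked matches with bm25(). Below is a minimal standalone sketch of that pattern using Python's sqlite3; the table and column names are illustrative, not taken from lbry-sdk.

# Minimal sketch of an external-content FTS5 index ranked with bm25()
# (illustrative schema, not lbry-sdk's).
import sqlite3

db = sqlite3.connect(":memory:")  # assumes an FTS5-enabled SQLite build
db.executescript("""
    create table claim (title text, description text);
    create virtual table search using fts5(
        title, description, content=claim, tokenize=porter
    );
""")

# With content=claim the index stores no text of its own; rows are mirrored in
# by hand, keyed by the content table's rowid.
db.execute("insert into claim (title, description) values ('lbry sdk', 'wallet and server code')")
db.execute("insert into claim (title, description) values ('other claim', 'unrelated text')")
db.execute("insert into search (rowid, title, description) select rowid, title, description from claim")

# bm25() returns more-negative scores for better matches, so ascending ORDER BY
# puts the best hits first; the numeric arguments weight the columns in
# declaration order (here: title counts twice as much as description).
rows = db.execute(
    "select claim.title from search join claim on (search.rowid = claim.rowid) "
    "where search match ? order by bm25(search, 2.0, 1.0)",
    ("wallet",)
).fetchall()
print(rows)  # [('lbry sdk',)]

The porter tokenizer stems words, so a query for "wallets" would also match "wallet".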
@@ -19,7 +19,6 @@ from lbry.schema.result import Outputs, Censor
 from lbry.wallet import Ledger, RegTestLedger
 
 from .common import CLAIM_TYPES, STREAM_TYPES, COMMON_TAGS, INDEXED_LANGUAGES
-from .full_text_search import FTS_ORDER_BY
 
 
 class SQLiteOperationalError(apsw.Error):
@@ -342,12 +341,7 @@ def claims_query(cols, for_count=False, **constraints) -> Tuple[str, Dict]:
     _apply_constraints_for_array_attributes(constraints, 'language', lambda _: _, for_count)
     _apply_constraints_for_array_attributes(constraints, 'location', lambda _: _, for_count)
 
-    if 'text' in constraints:
-        constraints["search"] = constraints.pop("text")
-        constraints["order_by"] = FTS_ORDER_BY
-        select = f"SELECT {cols} FROM search JOIN claim ON (search.rowid=claim.rowid)"
-    else:
-        select = f"SELECT {cols} FROM claim"
+    select = f"SELECT {cols} FROM claim"
     if not for_count:
         select += " LEFT JOIN claimtrie USING (claim_hash)"
     return query(select, **constraints)
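Context for this hunk: the removed branch rewrote a `text` constraint into an FTS MATCH joined back to `claim` by rowid, ordered by the bm25 expression defined in the deleted module. bm25() weights apply per column in the order the fts5 columns were declared, so the pairing below follows from the declaration above; a quick sketch making it explicit (the pairing is inferred, not stated in the repo):

# Pairing the removed FTS_ORDER_BY weights with the fts5 columns they apply to.
FTS_COLUMNS = ("claim_name", "channel_name", "title", "description", "author", "tags")
FTS_WEIGHTS = (4.0, 8.0, 1.0, 0.5, 1.0, 0.5)

order_by = f"bm25(search, {', '.join(str(w) for w in FTS_WEIGHTS)})"
assert order_by == "bm25(search, 4.0, 8.0, 1.0, 0.5, 1.0, 0.5)"  # matches FTS_ORDER_BY
print(dict(zip(FTS_COLUMNS, FTS_WEIGHTS)))
# {'claim_name': 4.0, 'channel_name': 8.0, 'title': 1.0, 'description': 0.5, 'author': 1.0, 'tags': 0.5}

In other words, hits in the channel name ranked highest, followed by the claim name, with descriptions and tags weighted lowest.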
@@ -15,7 +15,6 @@ from lbry.schema.mime_types import guess_stream_type
 from lbry.wallet import Ledger, RegTestLedger
 from lbry.wallet.transaction import Transaction, Output
 from lbry.wallet.server.db.canonical import register_canonical_functions
-from lbry.wallet.server.db.full_text_search import update_full_text_search, CREATE_FULL_TEXT_SEARCH, first_sync_finished
 from lbry.wallet.server.db.trending import TRENDING_ALGORITHMS
 
 from .common import CLAIM_TYPES, STREAM_TYPES, COMMON_TAGS, INDEXED_LANGUAGES
@@ -201,7 +200,6 @@ class SQLDB:
 
     CREATE_TABLES_QUERY = (
         CREATE_CLAIM_TABLE +
-        CREATE_FULL_TEXT_SEARCH +
         CREATE_SUPPORT_TABLE +
         CREATE_CLAIMTRIE_TABLE +
         CREATE_TAG_TABLE +
@@ -216,7 +214,6 @@ class SQLDB:
         self.db = None
         self.logger = class_logger(__name__, self.__class__.__name__)
         self.ledger = Ledger if main.coin.NET == 'mainnet' else RegTestLedger
-        self._fts_synced = False
         self.state_manager = None
         self.blocked_streams = None
         self.blocked_channels = None
@@ -930,28 +927,17 @@ class SQLDB:
         expire_timer.stop()
 
         r = timer.run
-        r(update_full_text_search, 'before-delete',
-          delete_claim_hashes, self.db.cursor(), self.main.first_sync)
         affected_channels = r(self.delete_claims, delete_claim_hashes)
         r(self.delete_supports, delete_support_txo_hashes)
         r(self.insert_claims, insert_claims, header)
-        reposted = r(self.calculate_reposts, insert_claims)
-        r(update_full_text_search, 'after-insert',
-          [txo.claim_hash for txo in insert_claims], self.db.cursor(), self.main.first_sync)
-        r(update_full_text_search, 'before-update',
-          [txo.claim_hash for txo in update_claims], self.db.cursor(), self.main.first_sync)
+        r(self.calculate_reposts, insert_claims)
         r(self.update_claims, update_claims, header)
-        r(update_full_text_search, 'after-update',
-          [txo.claim_hash for txo in update_claims], self.db.cursor(), self.main.first_sync)
         r(self.validate_channel_signatures, height, insert_claims,
           update_claims, delete_claim_hashes, affected_channels, forward_timer=True)
         r(self.insert_supports, insert_supports)
         r(self.update_claimtrie, height, recalculate_claim_hashes, deleted_claim_names, forward_timer=True)
         for algorithm in self.trending:
             r(algorithm.run, self.db.cursor(), height, daemon_height, recalculate_claim_hashes)
-        if not self._fts_synced and self.main.first_sync and height == daemon_height:
-            r(first_sync_finished, self.db.cursor())
-            self._fts_synced = True
         r(self.enqueue_deleted, delete_claim_hashes)
         r(self.enqueue_changes)
 
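One detail worth noting about the calls removed in this hunk: with an external-content FTS5 table, the 'delete' form has to be given the row's old values, which is why index maintenance was split into before-delete/before-update and after-insert/after-update steps around the claim writes. A self-contained sketch of that ordering, again with a toy schema rather than SQLDB's:

# Why the removed hooks ran in before/after pairs: the FTS5 'delete' insert for
# a content= table must see the row's OLD values, so the index entry is dropped
# before the content row changes and re-added afterwards.
import sqlite3

db = sqlite3.connect(":memory:")
db.executescript("""
    create table claim (title text);
    create virtual table search using fts5(title, content=claim);
""")
db.execute("insert into claim (title) values ('old title')")
db.execute("insert into search (rowid, title) select rowid, title from claim")

# before-update: drop the index row while the old values are still readable
db.execute("insert into search (search, rowid, title) select 'delete', rowid, title from claim")
db.execute("update claim set title = 'new title'")
# after-update: index the new values
db.execute("insert into search (rowid, title) select rowid, title from claim")

print(db.execute("select * from search where search match 'new'").fetchall())  # [('new title',)]
print(db.execute("select * from search where search match 'old'").fetchall())  # []

The removed first_sync_finished/_fts_synced pair then rebuilt the whole index once after the initial sync, instead of paying this per-block cost while catching up.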