fix trending to use built-in sqlite instead of apsw

Lex Berezhny 2021-06-15 16:51:50 -04:00
parent c9cf7fd4d4
commit e457b2f0d6
3 changed files with 9 additions and 8 deletions


@@ -76,7 +76,7 @@ async def make_es_index(index=None):
 async def run(db_path, clients, blocks, shard, index_name='claims'):
-    db = sqlite3.connect(db_path, isolation_level=None, uri=True)
+    db = sqlite3.connect(db_path, isolation_level=None, check_same_thread=False, uri=True)
     db.execute('pragma journal_mode=wal;')
     db.execute('pragma temp_store=memory;')
     producer = get_all(db, shard, clients, limit=blocks, index_name=index_name)
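Note: check_same_thread=False matters here because the standard sqlite3 module refuses, by default, to use a connection from any thread other than the one that created it. A minimal sketch of that behaviour (the claim table and its columns are made up for illustration, not taken from this repository):

import sqlite3
from concurrent.futures import ThreadPoolExecutor

# check_same_thread=False disables sqlite3's per-thread ownership check, so a
# connection created in the main thread can be reused from worker threads
# (the caller then has to serialise access itself).
db = sqlite3.connect(":memory:", isolation_level=None, check_same_thread=False)
db.execute("CREATE TABLE claim (claim_hash TEXT, amount INTEGER)")
db.execute("INSERT INTO claim VALUES ('abc', 100)")

def count_claims():
    # Runs in a worker thread but reuses the connection created above;
    # without check_same_thread=False this raises sqlite3.ProgrammingError.
    return db.execute("SELECT COUNT(*) FROM claim").fetchone()[0]

with ThreadPoolExecutor(max_workers=1) as pool:
    print(pool.submit(count_claims).result())  # -> 1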


@@ -231,7 +231,7 @@ def run(db, height, final_height, recalculate_claim_hashes):
                 FROM claim
                 WHERE claim_hash IN
                 ({','.join('?' for _ in recalculate_claim_hashes)});
-                """, recalculate_claim_hashes):
+                """, list(recalculate_claim_hashes)):
         trending_data.update_claim(row[0], 1E-8*row[1], time_boost)
     trending_log("done.\n")
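Note on the list(...) wrapping repeated in this commit: sqlite3 only binds query parameters supplied as a sequence or a mapping, so set-like collections (write_needed below is literally a set()) have to be converted before being passed in. A small sketch under that assumption, with hypothetical table name and data:

import sqlite3

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE claim (claim_hash TEXT, amount INTEGER)")
db.executemany("INSERT INTO claim VALUES (?, ?)", [("a", 1), ("b", 2), ("c", 3)])

hashes = {"a", "c"}  # stand-in for recalculate_claim_hashes
placeholders = ','.join('?' for _ in hashes)

# Passing the set directly is rejected because sqlite3 binds parameters only
# from a sequence or mapping; converting to a list avoids that.
rows = db.execute(
    f"SELECT claim_hash, amount FROM claim WHERE claim_hash IN ({placeholders})",
    list(hashes),
).fetchall()
print(sorted(rows))  # -> [('a', 1), ('c', 3)]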


@@ -5,6 +5,7 @@ decay rate for high valued claims.
 import math
 import time
+import sqlite3
 # Half life in blocks *for lower LBC claims* (it's shorter for whale claims)
 HALF_LIFE = 200
@@ -95,16 +96,16 @@ class TrendingDB:
     """
     def __init__(self):
-        self.conn = apsw.Connection(":memory:")
+        self.conn = sqlite3.connect(":memory:", check_same_thread=False)
         self.cursor = self.conn.cursor()
         self.initialised = False
         self.write_needed = set()

     def execute(self, query, *args, **kwargs):
-        return self.cursor.execute(query, *args, **kwargs)
+        return self.conn.execute(query, *args, **kwargs)

     def executemany(self, query, *args, **kwargs):
-        return self.cursor.executemany(query, *args, **kwargs)
+        return self.conn.executemany(query, *args, **kwargs)

     def begin(self):
         self.execute("BEGIN;")
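Note: delegating execute()/executemany() to the connection works because sqlite3.Connection provides them as shortcuts that open a cursor internally, run the statement, and return that cursor. A minimal sketch of the equivalence (table and data are illustrative, not from the class above):

import sqlite3

conn = sqlite3.connect(":memory:", check_same_thread=False)

# Connection.execute()/executemany() create a temporary cursor, run the
# statement on it, and return that cursor, so fetch methods still work.
conn.execute("CREATE TABLE t (x INTEGER)")
conn.executemany("INSERT INTO t VALUES (?)", [(1,), (2,)])
print(conn.execute("SELECT SUM(x) FROM t").fetchone()[0])  # -> 3

One behavioural difference worth keeping in mind: unlike apsw, sqlite3 manages transactions implicitly unless isolation_level=None is passed, which is worth double-checking against the explicit BEGIN issued by begin() above.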
@@ -250,7 +251,7 @@ class TrendingDB:
                 FROM claims
                 WHERE claim_hash IN
                 ({','.join('?' for _ in self.write_needed)});
-                """, self.write_needed).fetchall()
+                """, list(self.write_needed)).fetchall()
         db.executemany("""UPDATE claim SET trending_mixed = ?
                           WHERE claim_hash = ?;""", rows)
@@ -277,7 +278,7 @@ class TrendingDB:
                 FROM claim
                 WHERE claim_hash IN
                 ({','.join('?' for _ in recalculate_claim_hashes)});
-                """, recalculate_claim_hashes):
+                """, list(recalculate_claim_hashes)):
             claim_hash, lbc = row

             # Insert into trending db if it does not exist
@@ -349,7 +350,7 @@ class TrendingDB:
 # The "global" instance to work with
 # pylint: disable=C0103
-#trending_data = TrendingDB()
+trending_data = TrendingDB()

 def spike_mass(x, x_old):
     """