from twisted.internet import threads, defer
import json
import unqlite
import os
from twisted.enterprise import adbapi
from lbrynet.core.sqlite_helpers import rerun_if_locked


class BlindRepeaterSettings(object):
    """Persistence layer for blind repeater configuration.

    Scalar settings (running flag, max space, payment rates) are stored as
    JSON-encoded values in an UnQLite key/value store ("blind_settings.db").
    The approved peer list lives in an SQLite table ("blind_peers.db")
    accessed through a twisted adbapi connection pool.

    All public methods return twisted deferreds; UnQLite access is pushed
    off the reactor thread via threads.deferToThread.
    """

    def __init__(self, db_dir):
        """
        @param db_dir: directory in which the two database files are created.
        """
        self.db_dir = db_dir
        self.unq_db = None  # unqlite.UnQLite handle, opened by setup()
        self.sql_db = None  # adbapi.ConnectionPool, opened by setup()

    def setup(self):
        """Open both databases and ensure the approved_peers table exists.

        @return: a deferred that fires once the table has been created.
        """
        self.unq_db = unqlite.UnQLite(os.path.join(self.db_dir, "blind_settings.db"))
        # check_same_thread=False is solely to quiet a spurious error that appears to be due
        # to a bug in twisted, where the connection is closed by a different thread than the
        # one that opened it. The individual connections in the pool are not used in multiple
        # threads.
        self.sql_db = adbapi.ConnectionPool('sqlite3', os.path.join(self.db_dir, "blind_peers.db"),
                                            check_same_thread=False)

        return self.sql_db.runQuery("create table if not exists approved_peers (" +
                                    " ip_address text, " +
                                    " port integer" +
                                    ")")

    def stop(self):
        """Close both databases and clear the references.

        Previously this method only dropped the references, leaking the open
        UnQLite handle and the SQLite connection pool; both are now closed
        explicitly before being cleared.

        @return: a deferred that fires with True.
        """
        if self.unq_db is not None:
            self.unq_db.close()
            self.unq_db = None
        if self.sql_db is not None:
            self.sql_db.close()
            self.sql_db = None
        return defer.succeed(True)

    def save_repeater_status(self, running):
        """Persist whether the repeater is running. Returns a deferred."""
        def save_status():
            self.unq_db["running"] = json.dumps(running)

        return threads.deferToThread(save_status)

    def get_repeater_saved_status(self):
        """Return a deferred firing with the saved running flag (False if unset)."""
        def get_status():
            if "running" in self.unq_db:
                return json.loads(self.unq_db['running'])
            else:
                return False

        return threads.deferToThread(get_status)

    def save_max_space(self, max_space):
        """Persist the maximum disk space the repeater may use. Returns a deferred."""
        def save_space():
            self.unq_db['max_space'] = json.dumps(max_space)

        return threads.deferToThread(save_space)

    def get_saved_max_space(self):
        """Return a deferred firing with the saved max space (0 if unset)."""
        def get_space():
            if 'max_space' in self.unq_db:
                return json.loads(self.unq_db['max_space'])
            else:
                return 0

        return threads.deferToThread(get_space)

    @rerun_if_locked
    def save_approved_peer(self, host, port):
        """Insert (host, port) into the approved peer table. Returns a deferred.

        NOTE(review): no uniqueness constraint exists, so adding the same
        peer twice yields duplicate rows.
        """
        return self.sql_db.runQuery("insert into approved_peers values (?, ?)",
                                    (host, port))

    @rerun_if_locked
    def remove_approved_peer(self, host, port):
        """Delete all rows matching (host, port). Returns a deferred."""
        return self.sql_db.runQuery("delete from approved_peers where ip_address = ? and port = ?",
                                    (host, port))

    @rerun_if_locked
    def get_approved_peers(self):
        """Return a deferred firing with all (ip_address, port) rows."""
        return self.sql_db.runQuery("select * from approved_peers")

    def get_data_payment_rate(self):
        """Return a deferred firing with the data payment rate (None if unset)."""
        return threads.deferToThread(self._get_rate, "data_payment_rate")

    def save_data_payment_rate(self, rate):
        """Persist (or clear, when rate is None) the data payment rate."""
        return threads.deferToThread(self._save_rate, "data_payment_rate", rate)

    def get_valuable_info_payment_rate(self):
        """Return a deferred firing with the valuable info rate (None if unset)."""
        return threads.deferToThread(self._get_rate, "valuable_info_rate")

    def save_valuable_info_payment_rate(self, rate):
        """Persist (or clear, when rate is None) the valuable info rate."""
        return threads.deferToThread(self._save_rate, "valuable_info_rate", rate)

    def get_valuable_hash_payment_rate(self):
        """Return a deferred firing with the valuable hash rate (None if unset)."""
        return threads.deferToThread(self._get_rate, "valuable_hash_rate")

    def save_valuable_hash_payment_rate(self, rate):
        """Persist (or clear, when rate is None) the valuable hash rate."""
        return threads.deferToThread(self._save_rate, "valuable_hash_rate", rate)

    def _get_rate(self, rate_type):
        # Runs in a worker thread: decode the stored rate, None when absent.
        if rate_type in self.unq_db:
            return json.loads(self.unq_db[rate_type])
        else:
            return None

    def _save_rate(self, rate_type, rate):
        # Runs in a worker thread: store the rate, or remove the key entirely
        # when rate is None so _get_rate reports it as unset.
        if rate is not None:
            self.unq_db[rate_type] = json.dumps(rate)
        elif rate_type in self.unq_db:
            del self.unq_db[rate_type]
|