Victor Shyba 2019-08-26 18:23:43 -03:00
parent aec0bd5d11
commit 9df659b647
4 changed files with 25 additions and 17 deletions

@@ -1,3 +1,4 @@
+import os
 import sqlite3
 from typing import Union, Tuple, Set, List
 from itertools import chain
@@ -705,7 +706,8 @@ class LBRYDB(DB):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.sql = SQLDB(self, 'claims.db')
+        path = os.path.join(self.env.db_dir, 'claims.db')
+        self.sql = SQLDB(self, path)
 
     def close(self):
         super().close()
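
This hunk builds the claims.db location from the configured data directory rather than passing a bare filename that gets resolved against the process working directory. A minimal sketch of the effect, assuming a hypothetical db_dir value and that the store behind SQLDB is the sqlite3 module imported above:

```python
import os
import sqlite3

db_dir = "/var/lib/wallet-server"          # hypothetical stand-in for self.env.db_dir
path = os.path.join(db_dir, "claims.db")   # same construction as in the hunk

# An absolute path keeps the connection independent of os.chdir() and of
# wherever the daemon happens to be started from.
conn = sqlite3.connect(path)
conn.close()
```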

@@ -64,9 +64,10 @@ class LBRYSessionManager(SessionManager):
     async def start_other(self):
         self.running = True
+        path = os.path.join(self.env.db_dir, 'claims.db')
         args = dict(
             initializer=reader.initializer,
-            initargs=(self.logger, 'claims.db', self.env.coin.NET, self.env.database_query_timeout,
+            initargs=(self.logger, path, self.env.coin.NET, self.env.database_query_timeout,
                       self.env.track_metrics)
         )
         if self.env.max_query_workers is not None and self.env.max_query_workers == 0:
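
Here the already-joined path is handed to the query workers through initargs, so every worker process learns the database location at startup instead of relying on a shared working directory. A minimal sketch of that initializer/initargs pattern using the standard-library ProcessPoolExecutor and a hypothetical initializer function:

```python
import os
from concurrent.futures import ProcessPoolExecutor

CLAIMS_DB_PATH = None

def initializer(path):
    # Hypothetical stand-in for reader.initializer: each worker receives
    # the full path once and caches it for later queries.
    global CLAIMS_DB_PATH
    CLAIMS_DB_PATH = path

if __name__ == "__main__":
    db_dir = "/var/lib/wallet-server"          # stand-in for self.env.db_dir
    path = os.path.join(db_dir, "claims.db")

    # initargs are pickled and sent to every worker process, so passing the
    # joined path removes any dependency on the parent's working directory.
    executor = ProcessPoolExecutor(max_workers=4, initializer=initializer, initargs=(path,))
    executor.shutdown()
```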

@@ -17,6 +17,7 @@ import time
 from asyncio import sleep
 from bisect import bisect_right
 from collections import namedtuple
+from functools import partial
 from glob import glob
 from struct import pack, unpack
@@ -72,9 +73,8 @@ class DB:
         self.header_len = self.dynamic_header_len
         self.logger.info(f'switching current directory to {env.db_dir}')
         os.chdir(env.db_dir)
-        self.db_class = db_class(self.env.db_engine)
+        self.db_class = db_class(env.db_dir, self.env.db_engine)
         self.history = History()
         self.utxo_db = None
         self.tx_counts = None
@@ -86,12 +86,13 @@ class DB:
         self.merkle = Merkle()
         self.header_mc = MerkleCache(self.merkle, self.fs_block_hashes)
-        self.headers_file = util.LogicalFile('meta/headers', 2, 16000000)
-        self.tx_counts_file = util.LogicalFile('meta/txcounts', 2, 2000000)
-        self.hashes_file = util.LogicalFile('meta/hashes', 4, 16000000)
+        path = partial(os.path.join, self.env.db_dir)
+        self.headers_file = util.LogicalFile(path('meta/headers'), 2, 16000000)
+        self.tx_counts_file = util.LogicalFile(path('meta/txcounts'), 2, 2000000)
+        self.hashes_file = util.LogicalFile(path('meta/hashes'), 4, 16000000)
         if not self.coin.STATIC_BLOCK_HEADERS:
             self.headers_offsets_file = util.LogicalFile(
-                'meta/headers_offsets', 2, 16000000)
+                path('meta/headers_offsets'), 2, 16000000)
 
     async def _read_tx_counts(self):
         if self.tx_counts is not None:
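
The path helper introduced in this hunk is a partial application of os.path.join with the data directory bound as its first argument, so each later call only names the file relative to that directory. A small self-contained illustration with a hypothetical directory:

```python
import os
from functools import partial

db_dir = "/var/lib/electrumx"            # hypothetical stand-in for self.env.db_dir
path = partial(os.path.join, db_dir)     # binds the first argument of os.path.join

print(path("meta/headers"))              # /var/lib/electrumx/meta/headers
print(path("meta", "txcounts"))          # /var/lib/electrumx/meta/txcounts
```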
@@ -115,8 +116,9 @@ class DB:
         if self.utxo_db.is_new:
             self.logger.info('created new database')
             self.logger.info('creating metadata directory')
-            os.mkdir('meta')
-            with util.open_file('COIN', create=True) as f:
+            os.mkdir(os.path.join(self.env.db_dir, 'meta'))
+            coin_path = os.path.join(self.env.db_dir, 'meta', 'COIN')
+            with util.open_file(coin_path, create=True) as f:
                 f.write(f'ElectrumX databases and metadata for '
                         f'{self.coin.NAME} {self.coin.NET}'.encode())
             if not self.coin.STATIC_BLOCK_HEADERS:
@@ -474,7 +476,7 @@ class DB:
         return 'meta/block'
 
     def raw_block_path(self, height):
-        return f'{self.raw_block_prefix()}{height:d}'
+        return os.path.join(self.env.db_dir, f'{self.raw_block_prefix()}{height:d}')
 
     def read_raw_block(self, height):
         """Returns a raw block read from disk. Raises FileNotFoundError

@@ -13,20 +13,21 @@ from functools import partial
 from torba.server import util
 
 
-def db_class(name):
+def db_class(db_dir, name):
     """Returns a DB engine class."""
     for db_class in util.subclasses(Storage):
         if db_class.__name__.lower() == name.lower():
             db_class.import_module()
-            return db_class
+            return partial(db_class, db_dir)
     raise RuntimeError('unrecognised DB engine "{}"'.format(name))
 
 
 class Storage:
     """Abstract base class of the DB backend abstraction."""
 
-    def __init__(self, name, for_sync):
-        self.is_new = not os.path.exists(name)
+    def __init__(self, db_dir, name, for_sync):
+        self.db_dir = db_dir
+        self.is_new = not os.path.exists(os.path.join(db_dir, name))
         self.for_sync = for_sync or self.is_new
         self.open(name, create=self.is_new)
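
Because db_class() now returns partial(db_class, db_dir) instead of the class itself, call sites keep their old shape: the returned factory is still invoked with the database name and the for_sync flag, and the bound db_dir is supplied automatically as the new first constructor argument. A minimal sketch with a stub standing in for a Storage subclass:

```python
import os
from functools import partial

class StubDB:
    # Stand-in for a Storage subclass; mirrors the new
    # __init__(self, db_dir, name, for_sync) signature.
    def __init__(self, db_dir, name, for_sync):
        self.db_dir = db_dir
        self.is_new = not os.path.exists(os.path.join(db_dir, name))
        self.for_sync = for_sync or self.is_new

factory = partial(StubDB, "/var/lib/electrumx")  # what db_class() now returns
utxo_db = factory("utxo", False)                 # StubDB("/var/lib/electrumx", "utxo", False)
print(utxo_db.is_new, utxo_db.for_sync)
```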
@@ -78,8 +79,9 @@ class LevelDB(Storage):
     def open(self, name, create):
         mof = 512 if self.for_sync else 128
+        path = os.path.join(self.db_dir, name)
         # Use snappy compression (the default)
-        self.db = self.module.DB(name, create_if_missing=create,
+        self.db = self.module.DB(path, create_if_missing=create,
                                  max_open_files=mof)
         self.close = self.db.close
         self.get = self.db.get
@@ -99,12 +101,13 @@ class RocksDB(Storage):
     def open(self, name, create):
         mof = 512 if self.for_sync else 128
+        path = os.path.join(self.db_dir, name)
         # Use snappy compression (the default)
         options = self.module.Options(create_if_missing=create,
                                       use_fsync=True,
                                       target_file_size_base=33554432,
                                       max_open_files=mof)
-        self.db = self.module.DB(name, options)
+        self.db = self.module.DB(path, options)
         self.get = self.db.get
         self.put = self.db.put
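
With db_dir stored on the instance, both backends join it with the database name before handing the result to the engine. A sketch of the LevelDB case, assuming the backend module behind self.module is plyvel and using a hypothetical directory:

```python
import os
import plyvel  # assumed backend; import_module() normally loads it lazily

db_dir = "/var/lib/electrumx"        # hypothetical stand-in for self.db_dir
path = os.path.join(db_dir, "utxo")  # same join as in open() above

# create_if_missing / max_open_files mirror the arguments used by LevelDB.open()
db = plyvel.DB(path, create_if_missing=True, max_open_files=128)
db.close()
```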