Moves schema/ to src/schema/
parent 9c184ee05d
commit a8d5e4b52d
6 changed files with 9 additions and 103 deletions
@@ -1,6 +1,6 @@
 {
   "PATH": {
-    "SCHEMA": "schema/comments_ddl.sql",
+    "SCHEMA": "src/schema/comments_ddl.sql",
     "MAIN": "database/comments.db",
     "BACKUP": "database/comments.backup.db",
     "DEFAULT": "database/default.db",
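Note: the only change in this config is the relocated SCHEMA path. The call sites updated later in this diff now pass that path into setup_database explicitly; a minimal sketch of the new call shape, assuming src.settings still exposes the parsed config as config (as the old helper did):

from src.settings import config                    # assumed config loader, as used by the old helper below
from src.schema.db_helpers import setup_database   # new module location, per the import change below

# Create the main database from the relocated DDL file.
setup_database(config['PATH']['MAIN'], config['PATH']['SCHEMA'])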
@@ -1,64 +0,0 @@
-
-PRAGMA FOREIGN_KEYS = ON;
-
--- tables
--- DROP TABLE IF EXISTS COMMENT;
--- DROP TABLE IF EXISTS CHANNEL;
-
--- DROP TABLE IF EXISTS COMMENT;
-CREATE TABLE IF NOT EXISTS COMMENT (
-    CommentId TEXT NOT NULL,
-    LbryClaimId TEXT NOT NULL,
-    ChannelId TEXT DEFAULT NULL,
-    Body TEXT NOT NULL,
-    ParentId TEXT DEFAULT NULL,
-    Signature TEXT DEFAULT NULL,
-    Timestamp INTEGER NOT NULL,
-    SigningTs TEXT DEFAULT NULL,
-    CONSTRAINT COMMENT_PRIMARY_KEY PRIMARY KEY (CommentId) ON CONFLICT IGNORE,
-    CONSTRAINT COMMENT_SIGNATURE_SK UNIQUE (Signature) ON CONFLICT ABORT,
-    CONSTRAINT COMMENT_CHANNEL_FK FOREIGN KEY (ChannelId) REFERENCES CHANNEL(ClaimId)
-        ON DELETE NO ACTION ON UPDATE NO ACTION,
-    CONSTRAINT COMMENT_PARENT_FK FOREIGN KEY (ParentId) REFERENCES COMMENT(CommentId)
-        ON UPDATE CASCADE ON DELETE NO ACTION -- setting null implies comment is top level
-);
-
--- ALTER TABLE COMMENT ADD COLUMN SigningTs TEXT DEFAULT NULL;
-
--- DROP TABLE IF EXISTS CHANNEL;
-CREATE TABLE IF NOT EXISTS CHANNEL(
-    ClaimId TEXT NOT NULL,
-    Name TEXT NOT NULL,
-    CONSTRAINT CHANNEL_PK PRIMARY KEY (ClaimId)
-        ON CONFLICT IGNORE
-);
-
-
--- indexes
--- DROP INDEX IF EXISTS COMMENT_CLAIM_INDEX;
-CREATE INDEX IF NOT EXISTS CLAIM_COMMENT_INDEX ON COMMENT (LbryClaimId, CommentId);
-
-CREATE INDEX IF NOT EXISTS CHANNEL_COMMENT_INDEX ON COMMENT(ChannelId, CommentId);
-
--- VIEWS
-DROP VIEW IF EXISTS COMMENTS_ON_CLAIMS;
-CREATE VIEW IF NOT EXISTS COMMENTS_ON_CLAIMS (comment_id, claim_id, timestamp, channel_name, channel_id, channel_url, signature, signing_ts, parent_id, comment) AS
-    SELECT C.CommentId, C.LbryClaimId, C.Timestamp, CHAN.Name, CHAN.ClaimId, 'lbry://' || CHAN.Name || '#' || CHAN.ClaimId, C.Signature, C.SigningTs, C.ParentId, C.Body
-    FROM COMMENT AS C
-    LEFT OUTER JOIN CHANNEL CHAN on C.ChannelId = CHAN.ClaimId
-    ORDER BY C.Timestamp DESC;
-
-
-
-DROP VIEW IF EXISTS COMMENT_REPLIES;
-CREATE VIEW IF NOT EXISTS COMMENT_REPLIES (Author, CommentBody, ParentAuthor, ParentCommentBody) AS
-    SELECT AUTHOR.Name, OG.Body, PCHAN.Name, PARENT.Body FROM COMMENT AS OG
-        JOIN COMMENT AS PARENT
-            ON OG.ParentId = PARENT.CommentId
-        JOIN CHANNEL AS PCHAN ON PARENT.ChannelId = PCHAN.ClaimId
-        JOIN CHANNEL AS AUTHOR ON OG.ChannelId = AUTHOR.ClaimId
-    ORDER BY OG.Timestamp;
-
--- this is the default channel for anyone who wants to publish anonymously
--- INSERT INTO CHANNEL
--- VALUES ('9cb713f01bf247a0e03170b5ed00d5161340c486', '@Anonymous');
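The DDL above is removed from schema/; per the config change it now lives at src/schema/comments_ddl.sql. For orientation, the COMMENTS_ON_CLAIMS view it defines can be queried with the standard-library sqlite3 module; a minimal sketch, with an illustrative database path and a placeholder claim id:

import sqlite3

# Illustrative path; the real paths come from config['PATH'] in the config file above.
conn = sqlite3.connect('database/comments.db')
conn.row_factory = sqlite3.Row

# COMMENTS_ON_CLAIMS flattens COMMENT and CHANNEL into one row per comment,
# newest first (the view orders by Timestamp DESC).
rows = conn.execute(
    'SELECT comment_id, channel_name, channel_url, comment '
    'FROM COMMENTS_ON_CLAIMS WHERE claim_id = ?',
    ('0000000000000000000000000000000000000000',)   # placeholder claim id
).fetchall()

for row in rows:
    print(row['channel_name'], row['comment'])

conn.close()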
@@ -1,31 +0,0 @@
-import logging
-import sqlite3
-
-from src.settings import config
-
-logger = logging.getLogger(__name__)
-
-
-def setup_database(db_path):
-    logger.info('Creating db schema from %s in %s',
-                config['PATH']['SCHEMA'], db_path)
-    with sqlite3.connect(db_path) as conn:
-        with open(config['PATH']['SCHEMA'], 'r') as ddl:
-            with conn:
-                conn.executescript(ddl.read())
-
-
-def teardown_database(db_path):
-    logger.info('Dropping all tables from %s', db_path)
-    with sqlite3.connect(db_path) as conn:
-        conn.executescript("""
-            DROP VIEW IF EXISTS COMMENTS_ON_CLAIMS;
-            DROP VIEW IF EXISTS COMMENT_REPLIES;
-            DROP TABLE IF EXISTS COMMENT;
-            DROP TABLE IF EXISTS CHANNEL;
-        """)
-
-
-def backup_database(conn: sqlite3.Connection, back_fp):
-    with sqlite3.connect(back_fp) as back:
-        conn.backup(back)
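The helper module above is deleted from schema/; its replacement under src/schema/ is not shown in this diff. Since the call sites updated below now pass the schema path explicitly, the relocated setup_database presumably takes two arguments and no longer reads src.settings itself. A sketch under that assumption, not the verbatim new file:

# src/schema/db_helpers.py -- sketch only; the moved file is not part of this diff.
import logging
import sqlite3

logger = logging.getLogger(__name__)


def setup_database(db_path, schema_path):
    # Two-argument form matching the updated call sites below; the schema
    # path is injected rather than read from src.settings.config.
    logger.info('Creating db schema from %s in %s', schema_path, db_path)
    with sqlite3.connect(db_path) as conn, open(schema_path, 'r') as ddl:
        conn.executescript(ddl.read())


def backup_database(conn: sqlite3.Connection, back_fp):
    # sqlite3.Connection.backup (Python 3.7+) copies the live database into back_fp.
    with sqlite3.connect(back_fp) as back:
        conn.backup(back)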
@@ -9,7 +9,7 @@ import aiojobs.aiohttp
 import asyncio
 from aiohttp import web
 
-import schema.db_helpers
+from src.schema.db_helpers import setup_database, backup_database
 from src.database import obtain_connection, DatabaseWriter
 from src.handles import api_endpoint, get_api_endpoint
 
@@ -19,9 +19,9 @@ logger = logging.getLogger(__name__)
 async def setup_db_schema(app):
     if not pathlib.Path(app['db_path']).exists():
         logger.info('Setting up schema in %s', app['db_path'])
-        schema.db_helpers.setup_database(app['db_path'])
+        setup_database(app['db_path'], app['config']['PATH']['SCHEMA'])
     else:
-        logger.info('Database already exists in %s, skipping setup', app['db_path'])
+        logger.info(f'Database already exists in {app["db_path"]}, skipping setup')
 
 
 async def close_comment_scheduler(app):
@@ -35,7 +35,7 @@ async def database_backup_routine(app):
             await asyncio.sleep(app['config']['BACKUP_INT'])
             with app['reader'] as conn:
                 logger.debug('backing up database')
-                schema.db_helpers.backup_database(conn, app['backup'])
+                backup_database(conn, app['backup'])
     except asyncio.CancelledError:
         pass
 
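Both hooks touched here are aiohttp-style lifecycle handlers; how they are registered is outside this diff. Purely for orientation, and assuming the conventional aiohttp pattern, registration in this module would look roughly like the following (setup_db_schema refers to the coroutine defined above; the app keys shown are illustrative):

from aiohttp import web     # already imported at the top of this file

app = web.Application()
app['db_path'] = 'database/comments.db'    # illustrative; the real app fills these from its config
app['config'] = {'PATH': {'SCHEMA': 'src/schema/comments_ddl.sql'}, 'BACKUP_INT': 3600}

# on_startup handlers receive the Application instance, matching setup_db_schema(app) above.
app.on_startup.append(setup_db_schema)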
@@ -5,8 +5,9 @@ from faker.providers import internet
 from faker.providers import lorem
 from faker.providers import misc
 
-from src.database import get_comments_by_id, get_comment_ids, \
-    get_claim_comments
+from src.database import get_comments_by_id
+from src.database import get_comment_ids
+from src.database import get_claim_comments
 from src.writes import create_comment_or_error
 from tests.testcase import DatabaseTestCase
 
@@ -123,7 +123,7 @@ class DatabaseTestCase(unittest.TestCase):
         super().setUp()
         if pathlib.Path(config['PATH']['TEST']).exists():
             teardown_database(config['PATH']['TEST'])
-        setup_database(config['PATH']['TEST'])
+        setup_database(config['PATH']['TEST'], config['PATH']['SCHEMA'])
         self.conn = obtain_connection(config['PATH']['TEST'])
 
     def tearDown(self) -> None:
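DatabaseTestCase.setUp (above) now provisions a fresh schema via the two-argument setup_database and exposes the connection as self.conn. A hypothetical subclass, only to illustrate how that fixture is consumed (the test class and assertion are illustrative, not part of the repository):

from tests.testcase import DatabaseTestCase


class SchemaSmokeTest(DatabaseTestCase):
    def test_comments_view_starts_empty(self):
        # The relocated DDL creates COMMENTS_ON_CLAIMS, so a freshly
        # provisioned test database exposes it with zero rows.
        count = self.conn.execute('SELECT COUNT(*) FROM COMMENTS_ON_CLAIMS').fetchone()[0]
        self.assertEqual(count, 0)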