diff --git a/src/schema/comments_ddl.sql b/src/database/comments_ddl.sql
similarity index 94%
rename from src/schema/comments_ddl.sql
rename to src/database/comments_ddl.sql
index fa537ea..7556fbd 100644
--- a/src/schema/comments_ddl.sql
+++ b/src/database/comments_ddl.sql
@@ -1,10 +1,10 @@
 PRAGMA FOREIGN_KEYS = ON;
 
 
--- tables
--- DROP TABLE IF EXISTS COMMENT;
--- DROP TABLE IF EXISTS CHANNEL;
+-- Although I know this file is unnecessary, I like keeping it around.
--- DROP TABLE IF EXISTS COMMENT;
+-- I'm not gonna remove it.
+
+-- tables
 
 CREATE TABLE IF NOT EXISTS COMMENT (
     CommentId TEXT NOT NULL,
@@ -42,7 +42,6 @@ CREATE INDEX IF NOT EXISTS CLAIM_COMMENT_INDEX ON COMMENT (LbryClaimId, CommentI
 CREATE INDEX IF NOT EXISTS CHANNEL_COMMENT_INDEX ON COMMENT (ChannelId, CommentId);
 
 -- VIEWS
-DROP VIEW IF EXISTS COMMENTS_ON_CLAIMS;
 CREATE VIEW IF NOT EXISTS COMMENTS_ON_CLAIMS (comment_id, claim_id, timestamp, channel_name,
     channel_id, channel_url, signature, signing_ts, parent_id, comment) AS SELECT
     C.CommentId,
diff --git a/src/server/database.py b/src/database/queries.py
similarity index 96%
rename from src/server/database.py
rename to src/database/queries.py
index 5b56ae1..417cf15 100644
--- a/src/server/database.py
+++ b/src/database/queries.py
@@ -7,6 +7,8 @@ import typing
 import math
 import nacl.hash
 
+from src.database.schema import CREATE_TABLES_QUERY
+
 logger = logging.getLogger(__name__)
 
 
@@ -200,3 +202,13 @@ class DatabaseWriter(object):
     @property
     def connection(self):
         return self.conn
+
+
+def setup_database(db_path):
+    with sqlite3.connect(db_path) as conn:
+        conn.executescript(CREATE_TABLES_QUERY)
+
+
+def backup_database(conn: sqlite3.Connection, back_fp):
+    with sqlite3.connect(back_fp) as back:
+        conn.backup(back)
diff --git a/src/database/schema.py b/src/database/schema.py
new file mode 100644
index 0000000..bf46f72
--- /dev/null
+++ b/src/database/schema.py
@@ -0,0 +1,75 @@
+PRAGMAS = """
+    PRAGMA FOREIGN_KEYS = ON;
+"""
+
+CREATE_COMMENT_TABLE = """
+    CREATE TABLE IF NOT EXISTS COMMENT (
+        CommentId TEXT NOT NULL,
+        LbryClaimId TEXT NOT NULL,
+        ChannelId TEXT DEFAULT NULL,
+        Body TEXT NOT NULL,
+        ParentId TEXT DEFAULT NULL,
+        Signature TEXT DEFAULT NULL,
+        Timestamp INTEGER NOT NULL,
+        SigningTs TEXT DEFAULT NULL,
+        CONSTRAINT COMMENT_PRIMARY_KEY PRIMARY KEY (CommentId) ON CONFLICT IGNORE,
+        CONSTRAINT COMMENT_SIGNATURE_SK UNIQUE (Signature) ON CONFLICT ABORT,
+        CONSTRAINT COMMENT_CHANNEL_FK FOREIGN KEY (ChannelId) REFERENCES CHANNEL (ClaimId)
+            ON DELETE NO ACTION ON UPDATE NO ACTION,
+        CONSTRAINT COMMENT_PARENT_FK FOREIGN KEY (ParentId) REFERENCES COMMENT (CommentId)
+            ON UPDATE CASCADE ON DELETE NO ACTION -- setting null implies comment is top level
+    );
+"""
+
+CREATE_COMMENT_INDEXES = """
+    CREATE INDEX IF NOT EXISTS CLAIM_COMMENT_INDEX ON COMMENT (LbryClaimId, CommentId);
+    CREATE INDEX IF NOT EXISTS CHANNEL_COMMENT_INDEX ON COMMENT (ChannelId, CommentId);
+"""
+
+CREATE_CHANNEL_TABLE = """
+    CREATE TABLE IF NOT EXISTS CHANNEL (
+        ClaimId TEXT NOT NULL,
+        Name TEXT NOT NULL,
+        CONSTRAINT CHANNEL_PK PRIMARY KEY (ClaimId)
+            ON CONFLICT IGNORE
+    );
+"""
+
+CREATE_COMMENTS_ON_CLAIMS_VIEW = """
+    CREATE VIEW IF NOT EXISTS COMMENTS_ON_CLAIMS AS SELECT
+        C.CommentId AS comment_id,
+        C.Body AS comment,
+        C.LbryClaimId AS claim_id,
+        C.Timestamp AS timestamp,
+        CHAN.Name AS channel_name,
+        CHAN.ClaimId AS channel_id,
+        ('lbry://' || CHAN.Name || '#' || CHAN.ClaimId) AS channel_url,
+        C.Signature AS signature,
+        C.SigningTs AS signing_ts,
+        C.ParentId AS parent_id
+    FROM COMMENT AS C
+    LEFT OUTER JOIN CHANNEL CHAN ON C.ChannelId = CHAN.ClaimId
+    ORDER BY C.Timestamp DESC;
+"""
+
+# not being used right now but should be kept around when Tom finally asks for replies
+CREATE_COMMENT_REPLIES_VIEW = """
+CREATE VIEW IF NOT EXISTS COMMENT_REPLIES (Author, CommentBody, ParentAuthor, ParentCommentBody) AS
+SELECT AUTHOR.Name, OG.Body, PCHAN.Name, PARENT.Body
+FROM COMMENT AS OG
+    JOIN COMMENT AS PARENT
+        ON OG.ParentId = PARENT.CommentId
+    JOIN CHANNEL AS PCHAN ON PARENT.ChannelId = PCHAN.ClaimId
+    JOIN CHANNEL AS AUTHOR ON OG.ChannelId = AUTHOR.ClaimId
+ORDER BY OG.Timestamp;
+"""
+
+CREATE_TABLES_QUERY = (
+    PRAGMAS +
+    CREATE_COMMENT_TABLE +
+    CREATE_COMMENT_INDEXES +
+    CREATE_CHANNEL_TABLE +
+    CREATE_COMMENTS_ON_CLAIMS_VIEW +
+    CREATE_COMMENT_REPLIES_VIEW
+)
+
diff --git a/src/server/writes.py b/src/database/writes.py
similarity index 89%
rename from src/server/writes.py
rename to src/database/writes.py
index 902548d..0c4ba03 100644
--- a/src/server/writes.py
+++ b/src/database/writes.py
@@ -3,12 +3,12 @@ import sqlite3
 
 from asyncio import coroutine
 
-from src.server.database import delete_comment_by_id
+from database.queries import delete_comment_by_id
 from src.server.misc import is_authentic_delete_signal
-from src.server.database import get_comment_or_none
-from src.server.database import insert_comment
-from src.server.database import insert_channel
+from database.queries import get_comment_or_none
+from database.queries import insert_comment
+from database.queries import insert_channel
 from src.server.misc import channel_matches_pattern_or_error
 
 logger = logging.getLogger(__name__)
 
diff --git a/src/schema/db_helpers.py b/src/schema/db_helpers.py
deleted file mode 100644
index 24ce26e..0000000
--- a/src/schema/db_helpers.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import logging
-import sqlite3
-
-logger = logging.getLogger(__name__)
-
-
-def setup_database(db_path, schema_path):
-    logger.info(f'Creating db schema from {schema_path} in {db_path}')
-    with sqlite3.connect(db_path) as conn:
-        with open(schema_path, 'r') as ddl:
-            with conn:
-                conn.executescript(ddl.read())
-
-
-def teardown_database(db_path):
-    logger.info('Dropping all tables from %s', db_path)
-    with sqlite3.connect(db_path) as conn:
-        conn.executescript("""
-            DROP VIEW IF EXISTS COMMENTS_ON_CLAIMS;
-            DROP VIEW IF EXISTS COMMENT_REPLIES;
-            DROP TABLE IF EXISTS COMMENT;
-            DROP TABLE IF EXISTS CHANNEL;
-        """)
-
-
-def backup_database(conn: sqlite3.Connection, back_fp):
-    with sqlite3.connect(back_fp) as back:
-        conn.backup(back)
diff --git a/src/server/app.py b/src/server/app.py
index 84ed22e..2512801 100644
--- a/src/server/app.py
+++ b/src/server/app.py
@@ -9,8 +9,8 @@ import aiojobs.aiohttp
 import asyncio
 
 from aiohttp import web
-from src.schema.db_helpers import setup_database, backup_database
-from src.server.database import obtain_connection, DatabaseWriter
+from database.queries import setup_database, backup_database
+from database.queries import obtain_connection, DatabaseWriter
 from src.server.handles import api_endpoint, get_api_endpoint
 
 logger = logging.getLogger(__name__)
@@ -18,8 +18,8 @@ logger = logging.getLogger(__name__)
 
 async def setup_db_schema(app):
     if not pathlib.Path(app['db_path']).exists():
-        logger.info('Setting up schema in %s', app['db_path'])
-        setup_database(app['db_path'], app['config']['PATH']['SCHEMA'])
+        logger.info(f'Setting up schema in {app["db_path"]}')
+        setup_database(app['db_path'])
     else:
         logger.info(f'Database already exists in {app["db_path"]}, skipping setup')
 
diff --git a/src/server/handles.py b/src/server/handles.py
index b28ab02..f1d8f98 100644
--- a/src/server/handles.py
+++ b/src/server/handles.py
@@ -7,13 +7,13 @@ from aiohttp import web
 from aiojobs.aiohttp import atomic
 
 from src.server.misc import clean_input_params
-from src.server.database import get_claim_comments
-from src.server.database import get_comments_by_id, get_comment_ids
-from src.server.database import get_channel_id_from_comment_id
+from database.queries import get_claim_comments
+from database.queries import get_comments_by_id, get_comment_ids
+from database.queries import get_channel_id_from_comment_id
 from src.server.misc import is_valid_base_comment
 from src.server.misc import is_valid_credential_input
 from src.server.misc import make_error
-from src.server.writes import delete_comment_if_authorized, write_comment
+from database.writes import delete_comment_if_authorized, write_comment
 
 logger = logging.getLogger(__name__)
 
diff --git a/tests/database_test.py b/tests/database_test.py
index 269d39b..3de9312 100644
--- a/tests/database_test.py
+++ b/tests/database_test.py
@@ -5,10 +5,10 @@ from faker.providers import internet
 from faker.providers import lorem
 from faker.providers import misc
 
-from server.database import get_comments_by_id
-from server.database import get_comment_ids
-from server.database import get_claim_comments
-from server.writes import create_comment_or_error
+from database.queries import get_comments_by_id
+from database.queries import get_comment_ids
+from database.queries import get_claim_comments
+from database.writes import create_comment_or_error
 from tests.testcase import DatabaseTestCase
 
 fake = faker.Faker()
@@ -139,6 +139,7 @@ class TestCommentCreation(DatabaseTestCase):
         )
 
     def test05InsertRandomComments(self):
+        # TODO: Fix this test into something practical
         self.skipTest('This is a bad test')
         top_comments, claim_ids = generate_top_comments_random()
         total = 0
@@ -165,6 +166,7 @@ class TestCommentCreation(DatabaseTestCase):
         del claim_ids
 
     def test06GenerateAndListComments(self):
+        # TODO: Make this test not suck
         self.skipTest('this is a stupid test')
         top_comments, claim_ids = generate_top_comments()
         total, success = 0, 0
@@ -197,8 +199,6 @@ class ListDatabaseTest(DatabaseTestCase):
         top_coms, self.claim_ids = generate_top_comments(5, 75)
 
     def testLists(self):
-        self.skipTest('Populating a database each time is not a good way to test listing')
-
         for claim_id in self.claim_ids:
             with self.subTest(claim_id=claim_id):
                 comments = get_claim_comments(self.conn, claim_id)
diff --git a/tests/testcase.py b/tests/testcase.py
index 4ab4324..2357347 100644
--- a/tests/testcase.py
+++ b/tests/testcase.py
@@ -6,9 +6,7 @@ from unittest.case import _Outcome
 
 import asyncio
 
-from schema.db_helpers import setup_database, teardown_database
-from server.database import obtain_connection
-from settings import config
+from database.queries import obtain_connection, setup_database
 
 
 class AsyncioTestCase(unittest.TestCase):
@@ -129,7 +127,7 @@ class DatabaseTestCase(unittest.TestCase):
 
     def setUp(self) -> None:
         super().setUp()
-        setup_database(self.db_file, config['PATH']['SCHEMA'])
+        setup_database(self.db_file)
         self.conn = obtain_connection(self.db_file)
         self.addCleanup(self.conn.close)
         self.addCleanup(os.remove, self.db_file)