2019-05-26 06:42:39 +02:00
|
|
|
# cython: language_level=3
|
2019-05-21 13:54:52 +02:00
|
|
|
import logging
|
2019-05-30 17:25:42 +02:00
|
|
|
import pathlib
|
|
|
|
import re
|
2019-05-21 13:54:52 +02:00
|
|
|
|
|
|
|
import aiojobs.aiohttp
|
2019-05-16 01:17:06 +02:00
|
|
|
import asyncio
|
|
|
|
from aiohttp import web
|
|
|
|
|
2019-05-21 15:02:50 +02:00
|
|
|
import schema.db_helpers
|
2019-05-21 21:13:08 +02:00
|
|
|
from src.database import obtain_connection
|
|
|
|
from src.handles import api_endpoint
|
2019-05-26 06:42:39 +02:00
|
|
|
from src.handles import create_comment_scheduler
|
2019-05-26 07:31:05 +02:00
|
|
|
from src.settings import config_path, get_config
|
2019-05-26 06:42:39 +02:00
|
|
|
from src.writes import DatabaseWriter
|
2019-05-21 12:56:27 +02:00
|
|
|
|
2019-05-26 07:31:05 +02:00
|
|
|
# Application configuration, loaded once at import time.
config = get_config(config_path)

# Module-level logger: DEBUG and up goes to the debug log file and stdout,
# ERROR and up is duplicated into a separate error log file.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

formatter = logging.Formatter(config['LOGGING_FORMAT'])

debug_handler = logging.FileHandler(config['PATH']['LOG'])
error_handler = logging.FileHandler(config['PATH']['ERROR_LOG'])
stdout_handler = logging.StreamHandler()

# Identical wiring for every handler: level, shared formatter, attach.
for _handler, _level in ((debug_handler, logging.DEBUG),
                         (error_handler, logging.ERROR),
                         (stdout_handler, logging.DEBUG)):
    _handler.setLevel(_level)
    _handler.setFormatter(formatter)
    logger.addHandler(_handler)
|
2019-05-16 05:32:29 +02:00
|
|
|
|
2019-05-20 07:18:47 +02:00
|
|
|
|
2019-05-21 11:02:01 +02:00
|
|
|
async def setup_db_schema(app):
    """aiohttp on_startup hook: create the comment database schema.

    Builds the schema at ``app['db_path']`` via schema.db_helpers unless a
    file already exists at that path, in which case setup is skipped.
    """
    db_path = app['db_path']
    # Guard clause: an existing file is assumed to already hold the schema.
    if pathlib.Path(db_path).exists():
        logger.info('Database already exists in %s, skipping setup', db_path)
        return
    logger.info('Setting up schema in %s', db_path)
    schema.db_helpers.setup_database(db_path)
|
2019-05-16 01:17:06 +02:00
|
|
|
|
2019-05-20 07:18:47 +02:00
|
|
|
|
2019-05-21 11:02:01 +02:00
|
|
|
async def close_comment_scheduler(app):
    """aiohttp on_shutdown hook: stop the aiojobs comment scheduler."""
    logger.debug('Closing comment_scheduler')
    scheduler = app['comment_scheduler']
    await scheduler.close()
|
2019-05-16 01:17:06 +02:00
|
|
|
|
|
|
|
|
2019-05-21 12:56:27 +02:00
|
|
|
async def create_database_backup(app):
    """Background task: periodically back up the live comment database.

    Sleeps ``app['config']['BACKUP_INT']`` seconds between iterations, then
    copies the database at ``app['db_path']`` to ``app['backup']``.  Runs
    until the task is cancelled.

    Cancellation is swallowed deliberately: cleanup_background_tasks cancels
    this task and then awaits it, expecting the await to return normally.
    """
    try:
        while True:
            await asyncio.sleep(app['config']['BACKUP_INT'])
            with obtain_connection(app['db_path']) as conn:
                logger.debug('backing up database')
                schema.db_helpers.backup_database(conn, app['backup'])
    except asyncio.CancelledError:
        # Normal shutdown path; fix: dropped the unused `as e` binding.
        pass
|
2019-05-21 12:56:27 +02:00
|
|
|
|
2019-05-23 12:34:50 +02:00
|
|
|
|
2019-05-21 12:56:27 +02:00
|
|
|
async def start_background_tasks(app: web.Application):
    """aiohttp on_startup hook: launch the app's long-lived resources.

    Starts the periodic database-backup task, creates the aiojobs comment
    scheduler, and opens the single writer connection to the database.
    """
    # asyncio.create_task (3.7+, and this file already uses asyncio.run)
    # replaces the deprecated aiohttp ``Application.loop`` attribute; the
    # startup hook runs inside the event loop, so a task can be spawned
    # directly.
    app['waitful_backup'] = asyncio.create_task(create_database_backup(app))
    app['comment_scheduler'] = await create_comment_scheduler()
    app['writer'] = DatabaseWriter(app['db_path'])
|
|
|
|
|
|
|
|
|
|
|
|
def insert_to_config(app, conf=None, db_file=None):
    """Populate the app's configuration keys.

    Sets ``app['config']`` (falling back to the module-level config when
    *conf* is None), ``app['db_path']`` from the config's PATH table keyed
    by *db_file* (default ``'DEFAULT'``), and ``app['backup']`` as the
    db path with its ``.db`` suffix rewritten to ``.backup.db``.

    Raises ValueError if the backup path would equal the live db path
    (i.e. the db path does not end in ``.db``).
    """
    db_file = db_file if db_file else 'DEFAULT'
    app['config'] = conf if conf else config
    # BUG FIX: previously indexed `conf['PATH']` directly, which raised
    # TypeError whenever conf was None despite the fallback above; read
    # from the resolved app['config'] instead.
    app['db_path'] = app['config']['PATH'][db_file]
    app['backup'] = re.sub(r'\.db$', '.backup.db', app['db_path'])
    # Explicit raise instead of `assert`, which is stripped under `python -O`.
    if app['db_path'] == app['backup']:
        raise ValueError('backup path must differ from the database path')
|
2019-05-21 12:56:27 +02:00
|
|
|
|
|
|
|
|
|
|
|
async def cleanup_background_tasks(app):
    """aiohttp on_shutdown hook: stop the backup loop and close connections.

    Cancels the periodic backup task, waits for it to finish, then closes
    the reader and writer database connections.
    """
    logger.debug('Ending background backup loop')
    app['waitful_backup'].cancel()
    try:
        # Suppress cancellation here rather than relying on the task
        # swallowing it itself; previously a propagated CancelledError
        # would have skipped the close() calls below.
        await app['waitful_backup']
    except asyncio.CancelledError:
        pass
    finally:
        # Always release the database connections, even if the task ended
        # with an unexpected error.
        app['reader'].close()
        app['writer'].close()
|
2019-05-21 12:56:27 +02:00
|
|
|
|
|
|
|
|
2019-05-23 12:34:50 +02:00
|
|
|
def create_app(conf, db_path='DEFAULT', **kwargs):
    """Build and wire the aiohttp application.

    *conf* is the settings mapping, *db_path* names which entry of
    ``conf['PATH']`` holds the database file, and any extra keyword
    arguments are forwarded to aiojobs' setup.  Returns the configured
    web.Application.
    """
    application = web.Application()
    insert_to_config(application, conf, db_path)
    # Startup hooks run in registration order: schema first, then tasks.
    application.on_startup.append(setup_db_schema)
    application.on_startup.append(start_background_tasks)
    application['reader'] = obtain_connection(application['db_path'], True)
    # Shutdown hooks likewise: scheduler first, then background cleanup.
    application.on_shutdown.append(close_comment_scheduler)
    application.on_shutdown.append(cleanup_background_tasks)
    aiojobs.aiohttp.setup(application, **kwargs)
    application.add_routes([web.post('/api', api_endpoint)])
    return application
|
2019-05-16 01:17:06 +02:00
|
|
|
|
|
|
|
|
2019-05-26 04:44:27 +02:00
|
|
|
def run_app():
    """Entry point: build the application and serve it until shutdown.

    ``web.run_app`` manages its own event loop and blocks until the server
    stops, so it must be called directly.  The old code wrapped it in
    ``asyncio.run(...)``: the server ran first (as the argument expression),
    then ``asyncio.run(None)`` raised a ValueError that the extra
    ``except ValueError: pass`` silently swallowed.  Both the wrapper and
    that exception handler are removed.
    """
    appl = create_app(conf=config, db_path='DEFAULT', close_timeout=5.0)
    try:
        web.run_app(appl, access_log=logger,
                    host=config['HOST'], port=config['PORT'])
    except asyncio.CancelledError:
        pass
|