Adds logger to server

This commit is contained in:
Oleg Silkin 2019-05-21 06:56:27 -04:00
parent 0cb73c8314
commit 7d0bd1763f
6 changed files with 82 additions and 8 deletions

View file

@ -4,12 +4,16 @@
"MAIN": "database/comments.db",
"BACKUP": "database/comments.backup.db",
"DEFAULT": "database/default.db",
"TEST": "tests/test.db"
"TEST": "tests/test.db",
"ERROR_LOG": "error.log",
"LOG": "server.log"
},
"ANONYMOUS": {
"CHANNEL_ID": "9cb713f01bf247a0e03170b5ed00d5161340c486",
"CHANNEL_NAME": "@Anonymous"
},
"LOGGING_FORMAT": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
"HOST": "localhost",
"PORT": 2903
"PORT": 2903,
"BACKUP_INT": 3600
}

View file

@ -4,8 +4,11 @@ import typing
import re
import nacl.hash
import time
import logging
from lbry_comment_server.settings import config
logger = logging.getLogger(__name__)
def obtain_connection(filepath: str = None, row_factory: bool = True):
connection = sqlite3.connect(filepath)
@ -94,6 +97,7 @@ def create_comment(conn: sqlite3.Connection, comment: str, claim_id: str, **kwar
)
_insert_channel(conn, channel_name, channel_id)
except AssertionError:
logger.exception('Received invalid input')
return None
else:
channel_id = config['ANONYMOUS']['CHANNEL_ID']

View file

@ -1,14 +1,16 @@
import json
import sqlite3
import asyncio
from aiojobs.aiohttp import atomic
from aiohttp import web
import logging
from lbry_comment_server.database import obtain_connection
from lbry_comment_server.settings import config
from lbry_comment_server import get_claim_comments
from lbry_comment_server import get_comments_by_id, get_comment_ids
import lbry_comment_server.writes as writes
logger = logging.getLogger(__name__)
ERRORS = {
'INVALID_PARAMS': {'code': -32602, 'message': 'Invalid parameters'},
'INTERNAL': {'code': -32603, 'message': 'An internal error'},
@ -61,7 +63,7 @@ async def process_json(app, body: dict) -> dict:
result = METHODS[method](app, **params)
response['result'] = result
except TypeError as te:
print(te)
logger.exception('Got TypeError: %s', te)
response['error'] = ERRORS['INVALID_PARAMS']
else:
response['error'] = ERRORS['UNKNOWN']
@ -72,6 +74,7 @@ async def process_json(app, body: dict) -> dict:
async def api_endpoint(request: web.Request):
try:
body = await request.json()
logger.info('Received POST request from %s', request.remote)
if type(body) is list or type(body) is dict:
if type(body) is list:
return web.json_response(
@ -82,6 +85,8 @@ async def api_endpoint(request: web.Request):
else:
return web.json_response({'error': ERRORS['UNKNOWN']})
except json.decoder.JSONDecodeError as jde:
logger.exception('Received malformed JSON from %s: %s', request.remote, jde.msg)
logger.debug('Request headers: %s', request.headers)
return web.json_response({
'error': {'message': jde.msg, 'code': -1}
})

View file

@ -1,34 +1,85 @@
import asyncio
from aiohttp import web
import aiojobs.aiohttp
import logging
import schema.db_helpers as helpers
import lbry_comment_server.writes as writes
from lbry_comment_server.settings import config
from lbry_comment_server.handles import api_endpoint
from lbry_comment_server.database import obtain_connection
# Configure this module's logger to fan records out to three handlers:
# a DEBUG-level file (server.log), an ERROR-only file (error.log), and a
# console stream.
# NOTE(review): handlers are attached to this module's logger only, so
# records emitted by sibling modules (database, handles, writes) via their
# own getLogger(__name__) loggers will not reach these files — confirm
# whether a package-level or root logger was intended.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter(config['LOGGING_FORMAT'])
# Everything at DEBUG and above goes to the main log file.
debug_handler = logging.FileHandler(config['PATH']['LOG'])
# Only ERROR and above goes to the error log file.
error_handler = logging.FileHandler(config['PATH']['ERROR_LOG'])
# NOTE(review): StreamHandler() defaults to sys.stderr, not stdout,
# despite the variable name.
stdout_handler = logging.StreamHandler()
debug_handler.setLevel(logging.DEBUG)
error_handler.setLevel(logging.ERROR)
stdout_handler.setLevel(logging.DEBUG)
# All three handlers share the format string from the config file.
debug_handler.setFormatter(formatter)
error_handler.setFormatter(formatter)
stdout_handler.setFormatter(formatter)
logger.addHandler(debug_handler)
logger.addHandler(error_handler)
logger.addHandler(stdout_handler)
async def setup_db_schema(app):
    """On-startup hook: create the comment database schema at the configured path."""
    db_path = app['db_path']
    logger.info('Setting up schema in %s', db_path)
    helpers.setup_database(db_path)
async def close_comment_scheduler(app):
    """On-shutdown hook: close the scheduler that serializes comment writes."""
    logger.debug('Closing comment_scheduler')
    scheduler = app['comment_scheduler']
    await scheduler.close()
async def create_database_backup(app):
    """Background loop: periodically back up the live database.

    Sleeps for ``app['config']['BACKUP_INT']`` seconds between backups and
    runs until the task is cancelled on shutdown.  The cancellation is
    swallowed (not re-raised) so the awaiting cleanup hook returns normally.
    """
    try:
        while True:
            await asyncio.sleep(app['config']['BACKUP_INT'])
            with obtain_connection(app['db_path']) as conn:
                # Original logged a bare '%s' placeholder with no argument
                # (message came out as "%s backing up database"); log the
                # actual backup destination instead.
                logger.debug('Backing up database to %s', app['backup'])
                helpers.backup_database(conn, app['backup'])
    except asyncio.CancelledError:
        # Expected on every shutdown — log at DEBUG rather than via
        # logger.exception(), which wrote an ERROR + traceback to error.log
        # for normal termination.
        logger.debug('Database backup loop has been cancelled')
async def start_background_tasks(app: web.Application):
    """On-startup hook: launch the periodic database-backup task."""
    backup_task = app.loop.create_task(create_database_backup(app))
    app['waitful_backup'] = backup_task
async def cleanup_background_tasks(app):
    """On-shutdown hook: cancel the backup loop and wait for it to finish."""
    logger.debug('Ending background backup loop')
    backup_task = app['waitful_backup']
    backup_task.cancel()
    # create_database_backup catches CancelledError itself, so this await
    # completes normally once the loop has exited.
    await backup_task
def create_app(**kwargs):
    """Build the aiohttp Application for the comment server.

    Stores config and database paths on the app, opens the shared reader
    connection, registers startup/shutdown hooks, wires up aiojobs with
    ``kwargs``, and mounts the POST /api endpoint.
    """
    app = web.Application()
    paths = config['PATH']
    app['config'] = config
    app['db_path'] = paths['DEFAULT']
    app['backup'] = paths['BACKUP']
    app['reader'] = obtain_connection(app['db_path'], True)
    for startup_hook in (setup_db_schema, start_background_tasks):
        app.on_startup.append(startup_hook)
    for shutdown_hook in (close_comment_scheduler, cleanup_background_tasks):
        app.on_shutdown.append(shutdown_hook)
    aiojobs.aiohttp.setup(app, **kwargs)
    app.add_routes([web.post('/api', api_endpoint)])
    return app
async def stop_app(runner):
    """Tear down the aiohttp runner (stops sites and closes the server)."""
    logger.info('stopping app; running cleanup routine')
    teardown = runner.cleanup()
    await teardown
@ -42,8 +93,8 @@ async def run_app(app, duration=3600):
site = web.TCPSite(runner, config['HOST'], config['PORT'])
await site.start()
await asyncio.sleep(duration)
except asyncio.CancelledError:
pass
except asyncio.CancelledError as cerr:
logger.exception('Got cancellation signal', )
finally:
await stop_app(runner)

View file

@ -1,5 +1,8 @@
import json
import pathlib
import logging
logger = logging.getLogger(__name__)
root_dir = pathlib.Path(__file__).parent.parent
config_path = root_dir / 'config' / 'conf.json'
@ -14,4 +17,4 @@ def get_config(filepath):
# Load the server configuration once at import time and log the effective
# settings so they are visible in the logs.  (Removed the commented-out
# print() that the logger.info call superseded.)
config = get_config(config_path)
logger.info('Loaded conf.json: %s', json.dumps(config, indent=4))

View file

@ -3,6 +3,10 @@ import atexit
from asyncio import coroutine
import lbry_comment_server.database as db
import logging
logger = logging.getLogger(__name__)
# DatabaseWriter should be instantiated on startup
class DatabaseWriter(object):
@ -13,10 +17,13 @@ class DatabaseWriter(object):
self.conn = db.obtain_connection(db_file)
DatabaseWriter._writer = self
atexit.register(self.cleanup)
logging.info('Database writer has been created at %s', repr(self))
else:
logging.warning('Someone attempted to insantiate DatabaseWriter')
raise TypeError('Database Writer already exists!')
def cleanup(self):
    """Release the writer singleton: clear the class-level slot and close the connection."""
    # Use the module's named logger (defined at module top) rather than the
    # root logger via logging.info, so records carry this module's name and
    # follow its logger configuration — consistent with the rest of the file.
    logger.info('Cleaning up database writer')
    DatabaseWriter._writer = None
    self.conn.close()