Logging updates

Oleg Silkin 2019-07-20 09:06:34 -04:00
parent 3c8cc2520b
commit 43d23c4a48
6 changed files with 95 additions and 45 deletions

config/conf.json

@@ -6,9 +6,14 @@
     "DEFAULT": "database/default.db",
     "TEST": "tests/test.db",
     "ERROR_LOG": "logs/error.log",
-    "LOG": "logs/server.log"
+    "DEBUG_LOG": "logs/debug.log",
+    "SERVER_LOG": "logs/server.log"
+  },
+  "LOGGING": {
+    "FORMAT": "%(asctime)s | %(levelname)s | %(name)s | %(module)s.%(funcName)s:%(lineno)d | %(message)s",
+    "AIOHTTP_FORMAT": "%(asctime)s | %(levelname)s | %(name)s | %(message)s",
+    "DATEFMT": "%Y-%m-%d %H:%M:%S"
   },
-  "LOGGING_FORMAT": "%(asctime)s - %(levelname)s - %(module)s - %(funcName)s - %(message)s",
   "HOST": "localhost",
   "PORT": 5921,
   "BACKUP_INT": 3600,

main.py

@@ -1,4 +1,75 @@
+import logging.config
+import logging
+import os
+from src.settings import config
 from src.app import run_app
 
 
+def config_logging_from_settings(conf):
+    # Translate the LOGGING and PATH sections of conf.json into a dictConfig schema
+    _config = {
+        "version": 1,
+        "disable_existing_loggers": False,
+        "formatters": {
+            "standard": {
+                "format": conf['LOGGING']['FORMAT'],
+                "datefmt": conf['LOGGING']['DATEFMT']
+            },
+            "aiohttp": {
+                "format": conf['LOGGING']['AIOHTTP_FORMAT'],
+                "datefmt": conf['LOGGING']['DATEFMT']
+            }
+        },
+        "handlers": {
+            "console": {
+                "level": "DEBUG",
+                "formatter": "standard",
+                "class": "logging.StreamHandler",
+                "stream": "ext://sys.stdout"
+            },
+            "debug": {
+                "level": "DEBUG",
+                "formatter": "standard",
+                "class": "logging.handlers.RotatingFileHandler",
+                "filename": conf['PATH']['DEBUG_LOG'],
+                "maxBytes": 10485760,
+                "backupCount": 5
+            },
+            "error": {
+                "level": "ERROR",
+                "formatter": "standard",
+                "class": "logging.handlers.RotatingFileHandler",
+                "filename": conf['PATH']['ERROR_LOG'],
+                "maxBytes": 10485760,
+                "backupCount": 5
+            },
+            "server": {
+                "level": "NOTSET",
+                "formatter": "aiohttp",
+                "class": "logging.handlers.RotatingFileHandler",
+                "filename": conf['PATH']['SERVER_LOG'],
+                "maxBytes": 10485760,
+                "backupCount": 5
+            }
+        },
+        "loggers": {
+            "": {
+                "handlers": ["console", "debug", "error"],
+                "level": "DEBUG",
+                "propagate": True
+            },
+            "aiohttp.access": {
+                "handlers": ["server"],
+                "level": "INFO",
+                "propagate": False
+            }
+        }
+    }
+    logging.config.dictConfig(_config)
+
+
 if __name__ == '__main__':
-    run_app()
+    config_logging_from_settings(config)
+    logger = logging.getLogger(__name__)
+    run_app(config)
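Not part of the commit: a minimal sketch of how this dictConfig routes records, with one console handler standing in for the rotating-file handlers. Records from application loggers propagate up to the root handlers, while 'aiohttp.access' keeps its records on its own handler via propagate=False.

import logging
import logging.config

logging.config.dictConfig({
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {"standard": {"format": "%(asctime)s | %(levelname)s | %(name)s | %(message)s"}},
    "handlers": {"console": {"class": "logging.StreamHandler", "formatter": "standard", "level": "DEBUG"}},
    "loggers": {
        "": {"handlers": ["console"], "level": "DEBUG"},  # root: application records
        "aiohttp.access": {"handlers": ["console"], "level": "INFO", "propagate": False},  # request lines only
    },
})

logging.getLogger("src.app").debug("goes to the root handlers (console/debug/error in main.py)")
logging.getLogger("aiohttp.access").info("goes only to the access handler (server log in main.py)")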

src/app.py

@@ -11,29 +11,8 @@ from aiohttp import web
 import schema.db_helpers
 from src.database import obtain_connection, DatabaseWriter
 from src.handles import api_endpoint
-from src.settings import config_path, get_config
 
-config = get_config(config_path)
 logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
-
-formatter = logging.Formatter(config['LOGGING_FORMAT'])
-debug_handler = logging.FileHandler(config['PATH']['LOG'])
-error_handler = logging.FileHandler(config['PATH']['ERROR_LOG'])
-stdout_handler = logging.StreamHandler()
-
-debug_handler.setLevel(logging.DEBUG)
-error_handler.setLevel(logging.ERROR)
-stdout_handler.setLevel(logging.DEBUG)
-
-debug_handler.setFormatter(formatter)
-error_handler.setFormatter(formatter)
-stdout_handler.setFormatter(formatter)
-
-logger.addHandler(debug_handler)
-logger.addHandler(error_handler)
-logger.addHandler(stdout_handler)
 
 
 async def setup_db_schema(app):
@@ -45,7 +24,7 @@ async def setup_db_schema(app):
 async def close_comment_scheduler(app):
-    logger.debug('Closing comment_scheduler')
+    logger.info('Closing comment_scheduler')
     await app['comment_scheduler'].close()
@@ -78,7 +57,7 @@ def insert_to_config(app, conf=None, db_file=None):
 async def cleanup_background_tasks(app):
-    logger.debug('Ending background backup loop')
+    logger.info('Ending background backup loop')
     app['waitful_backup'].cancel()
     await app['waitful_backup']
     app['reader'].close()
@@ -90,17 +69,17 @@ def create_app(conf, db_path='DEFAULT', **kwargs):
     insert_to_config(app, conf, db_path)
     app.on_startup.append(setup_db_schema)
     app.on_startup.append(start_background_tasks)
-    app.on_shutdown.append(close_comment_scheduler)
     app.on_shutdown.append(cleanup_background_tasks)
+    app.on_shutdown.append(close_comment_scheduler)
     aiojobs.aiohttp.setup(app, **kwargs)
     app.add_routes([web.post('/api', api_endpoint)])
     return app
 
 
-def run_app():
+def run_app(config):
     appl = create_app(conf=config, db_path='DEFAULT', close_timeout=5.0)
     try:
-        asyncio.run(web.run_app(appl, access_log=logger, host=config['HOST'], port=config['PORT']))
+        asyncio.run(web.run_app(appl, access_log=logging.getLogger('aiohttp.access'), host=config['HOST'], port=config['PORT']))
     except asyncio.CancelledError:
         pass
     except ValueError:
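Not part of the commit: 'aiohttp.access' matches the logger name aiohttp uses for its access log by default, so pointing access_log at it sends request lines to the rotating server log configured in main.py. The propagate=False set on that logger keeps those lines out of the console, debug, and error handlers attached to the root logger.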

src/database.py

@@ -95,7 +95,7 @@ def insert_comment(conn: sqlite3.Connection, claim_id: str, comment: str, parent
         """,
         (comment_id, claim_id, channel_id, comment, parent_id, timestamp, signature, signing_ts)
     )
-    logger.debug('Inserted Comment into DB, `comment_id`: %s', comment_id)
+    logger.info('Inserted Comment into DB, `comment_id`: %s', comment_id)
     return comment_id

src/handles.py

@@ -84,21 +84,19 @@ async def process_json(app, body: dict) -> dict:
         method = body['method']
         params = body.get('params', {})
         clean_input_params(params)
+        logger.debug(f'Received Method {method}, params: {params}')
         try:
             if asyncio.iscoroutinefunction(METHODS[method]):
                 result = await METHODS[method](app, params)
             else:
                 result = METHODS[method](app, params)
             response['result'] = result
-        except TypeError as te:
-            logger.exception('Got TypeError: %s', te)
-            response['error'] = ERRORS['INVALID_PARAMS']
-        except ValueError as ve:
-            logger.exception('Got ValueError: %s', ve)
-            response['error'] = ERRORS['INVALID_PARAMS']
-        except Exception as e:
-            logger.exception('Got unknown exception: %s', e)
-            response['error'] = ERRORS['INTERNAL']
+        except Exception as err:
+            logger.exception(f'Got {type(err).__name__}: {err}')
+            if type(err) in (ValueError, TypeError):
+                response['error'] = make_error('INVALID_PARAMS', err)
+            else:
+                response['error'] = make_error('INTERNAL', err)
     else:
         response['error'] = ERRORS['METHOD_NOT_FOUND']
     return response
@@ -107,7 +105,6 @@ async def process_json(app, body: dict) -> dict:
 @atomic
 async def api_endpoint(request: web.Request):
     try:
-        logger.info('Received POST request from %s', request.remote)
         body = await request.json()
         if type(body) is list or type(body) is dict:
             if type(body) is list:
@@ -127,5 +124,6 @@ async def api_endpoint(request: web.Request):
                 'error': ERRORS['PARSE_ERROR']
             })
     except Exception as e:
-        logger.exception('Exception raised by request from %s: %s', request.remote, e)
-        return web.json_response({'error': ERRORS['INVALID_REQUEST']})
+        logger.exception(f'Exception raised by request from {request.remote}: {e}')
+        logger.debug(f'Request headers: {request.headers}')
+        return make_error('INVALID_REQUEST', e)
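Not part of the commit: make_error is called by the new error paths but is not defined in this diff. A hypothetical sketch of its shape, assuming ERRORS maps names to JSON-RPC-style error dicts and make_error copies the matching template and attaches the triggering exception:

# Hypothetical sketch only; the real make_error and ERRORS live outside this diff.
ERRORS = {
    'INTERNAL': {'code': -32603, 'message': 'Internal error'},        # assumed shape
    'INVALID_PARAMS': {'code': -32602, 'message': 'Invalid params'},  # assumed shape
    'INVALID_REQUEST': {'code': -32600, 'message': 'Invalid request'},
}

def make_error(error_type: str, exc: Exception = None) -> dict:
    error = dict(ERRORS.get(error_type, ERRORS['INTERNAL']))  # copy the template entry
    if exc is not None:
        error['data'] = f'{type(exc).__name__}: {exc}'        # record what actually went wrong
    return error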

src/settings.py

@@ -1,9 +1,7 @@
 # cython: language_level=3
 import json
-import logging
 import pathlib
 
-logger = logging.getLogger(__name__)
 
 root_dir = pathlib.Path(__file__).parent.parent
 config_path = root_dir / 'config' / 'conf.json'
@@ -18,4 +16,3 @@ def get_config(filepath):
 
 
 config = get_config(config_path)
-logger.info('Loaded conf.json: %s', json.dumps(config, indent=4))