make loggly logs less verbose

Alex Grintsvayg 2017-03-08 10:21:12 -05:00
parent 531fa63590
commit 68f5588bd9
6 changed files with 24 additions and 106 deletions

@@ -10,7 +10,7 @@ at anytime.
## [Unreleased]
### Added
* Add file filters: `claim_id`, `outpoint`, and `rowid`
*
* Make loggly logs less verbose
*
### Changed

@@ -435,7 +435,6 @@ Returns:
'download_directory': string,
'max_upload': float, 0.0 for unlimited
'max_download': float, 0.0 for unlimited
'upload_log': bool,
'search_timeout': float,
'download_timeout': int
'max_search_results': int,
@@ -458,7 +457,6 @@ Args:
'download_directory': string,
'max_upload': float, 0.0 for unlimited
'max_download': float, 0.0 for unlimited
'upload_log': bool,
'download_timeout': int
Returns:
settings dict

@@ -202,7 +202,6 @@ ADJUSTABLE_SETTINGS = {
'search_timeout': (float, 5.0),
'startup_scripts': (list, []),
'ui_branch': (str, 'master'),
'upload_log': (bool, True),
'use_auth_http': (bool, False),
'use_upnp': (bool, True),
'wallet': (str, LBRYUM_WALLET),

@@ -1,21 +1,15 @@
import datetime
import inspect
import json
import logging
import logging.handlers
import os
import platform
import sys
import traceback
import requests
from requests_futures.sessions import FuturesSession
import twisted.python.log
import lbrynet
from lbrynet import analytics
from lbrynet import build_type
from lbrynet import conf
from lbrynet import __version__ as lbrynet_version, analytics, build_type, conf
from lbrynet.core import utils
####
@@ -25,7 +19,7 @@ from lbrynet.core import utils
# _srcfile is used when walking the stack to check when we've got the first
# caller stack frame.
#
if hasattr(sys, 'frozen'): #support for py2exe
if hasattr(sys, 'frozen'): # support for py2exe
_srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:])
elif __file__[-4:].lower() in ['.pyc', '.pyo']:
_srcfile = __file__[:-4] + '.py'
@@ -86,6 +80,7 @@ def _log_decorator(fn):
handler has its log-level set and is attached to the specified
logger or the root logger.
"""
def helper(*args, **kwargs):
log = kwargs.pop('log', logging.getLogger())
level = kwargs.pop('level', logging.INFO)
@@ -96,6 +91,7 @@ def _log_decorator(fn):
handler = fn(*args, **kwargs)
configure_handler(handler, log, level)
return handler
return helper
@@ -146,14 +142,15 @@ def configure_analytics_handler(analytics_manager):
def get_loggly_url(token=None, version=None):
token = token or utils.deobfuscate(conf.settings['LOGGLY_TOKEN'])
version = version or lbrynet.__version__
version = version or lbrynet_version
return LOGGLY_URL.format(token=token, tag='lbrynet-' + version)
def configure_loggly_handler(*args, **kwargs):
if build_type.BUILD == 'dev':
return
_configure_loggly_handler(*args, **kwargs)
level = kwargs.pop('level', logging.WARNING)
_configure_loggly_handler(*args, level=level, **kwargs)
@_log_decorator
@@ -168,6 +165,7 @@ def _configure_loggly_handler(url=None, **kwargs):
class JsonFormatter(logging.Formatter):
"""Format log records using json serialization"""
def __init__(self, **kwargs):
self.attributes = kwargs
@@ -187,6 +185,7 @@ class JsonFormatter(logging.Formatter):
data['exc_info'] = self.formatException(record.exc_info)
return json.dumps(data)
####
# This code is copied from logging/__init__.py in the python source code
####
@@ -194,8 +193,8 @@ def findCaller(srcfile=None):
"""Returns the filename, line number and function name of the caller"""
srcfile = srcfile or _srcfile
f = inspect.currentframe()
#On some versions of IronPython, currentframe() returns None if
#IronPython isn't run with -X:Frames.
# On some versions of IronPython, currentframe() returns None if
# IronPython isn't run with -X:Frames.
if f is not None:
f = f.f_back
rv = "(unknown file)", 0, "(unknown function)"
@@ -209,6 +208,8 @@ def findCaller(srcfile=None):
rv = (filename, f.f_lineno, co.co_name)
break
return rv
###
@@ -296,6 +297,7 @@ class LoggerNameFilter(object):
Debug records pass if the log record name (or a parent) match
the input list of logger names.
"""
def __init__(self, logger_names):
self.logger_names = logger_names
@@ -318,52 +320,9 @@ def get_parent(logger_name):
return '.'.join(names)
class LogUploader(object):
def __init__(self, log_name, log_file, log_size):
self.log_name = log_name
self.log_file = log_file
self.log_size = log_size
def upload(self, exclude_previous, id_hash, log_type):
if not os.path.isfile(self.log_file):
return
log_contents = self.log_contents(exclude_previous)
params = {
'date': datetime.datetime.utcnow().strftime('%Y%m%d-%H%M%S'),
'hash': id_hash,
'sys': platform.system(),
'type': self.get_type(log_type),
'log': log_contents
}
requests.post(conf.settings['LOG_POST_URL'], params)
def log_contents(self, exclude_previous):
with open(self.log_file) as f:
if exclude_previous:
f.seek(self.log_size)
log_contents = f.read()
else:
log_contents = f.read()
return log_contents
def get_type(self, log_type):
if log_type:
return "%s-%s" % (self.log_name, log_type)
else:
return self.log_name
@classmethod
def load(cls, log_name, log_file):
if os.path.isfile(log_file):
with open(log_file, 'r') as f:
log_size = len(f.read())
else:
log_size = 0
return cls(log_name, log_file, log_size)
class Logger(logging.Logger):
"""A logger that has an extra `fail` method useful for handling twisted failures."""
def fail(self, callback=None, *args, **kwargs):
"""Returns a function to log a failure from an errback.
@@ -391,6 +350,7 @@ class Logger(logging.Logger):
keywords are treated as normal log kwargs.
"""
fn, lno, func = findCaller()
def _fail(err, msg, *msg_args, **msg_kwargs):
level = msg_kwargs.pop('level', logging.ERROR)
msg += ": %s"
@@ -410,6 +370,7 @@ class Logger(logging.Logger):
# never do and then we end up with an unhandled
# error that will get swallowed by twisted
self.exception('Failed to run callback')
return _fail
def trace(self, msg, *args, **kwargs):

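The net effect of the log_support.py changes above is that the Loggly handler is now attached at logging.WARNING by default, so INFO- and DEBUG-level records no longer leave the machine. A minimal sketch of that filtering behaviour, using a plain StreamHandler as a stand-in for the Loggly HTTPS handler (the logger name below is only illustrative):

    import logging

    # Stand-in for the Loggly handler; any handler behaves the same way.
    handler = logging.StreamHandler()
    handler.setLevel(logging.WARNING)  # records below WARNING are dropped by this handler

    log = logging.getLogger("lbrynet.example")
    log.setLevel(logging.DEBUG)        # the logger itself still sees everything
    log.addHandler(handler)

    log.info("routine detail")           # not emitted by the handler
    log.warning("something went wrong")  # emitted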
@@ -220,11 +220,10 @@ class Daemon(AuthJSONRPCServer):
LBRYnet daemon, a jsonrpc interface to lbry functions
"""
def __init__(self, root, analytics_manager, upload_logs_on_shutdown=True):
def __init__(self, root, analytics_manager):
AuthJSONRPCServer.__init__(self, conf.settings['use_auth_http'])
reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
self.upload_logs_on_shutdown = upload_logs_on_shutdown
self.allowed_during_startup = [
'stop', 'status', 'version',
# delete these once they are fully removed:
@@ -250,7 +249,6 @@ class Daemon(AuthJSONRPCServer):
self.max_key_fee = conf.settings['max_key_fee']
self.max_upload = conf.settings['max_upload']
self.max_download = conf.settings['max_download']
self.upload_log = conf.settings['upload_log']
self.search_timeout = conf.settings['search_timeout']
self.download_timeout = conf.settings['download_timeout']
self.max_search_results = conf.settings['max_search_results']
@@ -277,7 +275,6 @@ class Daemon(AuthJSONRPCServer):
# TODO: this should probably be passed into the daemon, or
# possibly have the entire log upload functionality taken out
# of the daemon, but I don't want to deal with that now
self.log_uploader = log_support.LogUploader.load('lbrynet', self.log_file)
self.analytics_manager = analytics_manager
self.lbryid = utils.generate_id()
@@ -306,29 +303,16 @@ class Daemon(AuthJSONRPCServer):
def setup(self, launch_ui):
self._modify_loggly_formatter()
@defer.inlineCallbacks
def _announce_startup():
def _wait_for_credits():
if float(self.session.wallet.get_balance()) == 0.0:
self.startup_status = STARTUP_STAGES[6]
return reactor.callLater(1, _wait_for_credits)
else:
return _announce()
def _announce():
self.announced_startup = True
self.startup_status = STARTUP_STAGES[5]
log.info("Started lbrynet-daemon")
log.info("%i blobs in manager", len(self.session.blob_manager.blobs))
if self.first_run:
d = self._upload_log(log_type="first_run")
elif self.upload_log:
d = self._upload_log(exclude_previous=True, log_type="start")
else:
d = defer.succeed(None)
d.addCallback(lambda _: self.session.blob_manager.get_all_verified_blobs())
d.addCallback(lambda _: _announce())
return d
yield self.session.blob_manager.get_all_verified_blobs()
yield _announce()
log.info("Starting lbrynet-daemon")
@@ -528,16 +512,6 @@ class Daemon(AuthJSONRPCServer):
self.query_handlers[query_id] = handler
return defer.succeed(None)
def _upload_log(self, log_type=None, exclude_previous=False, force=False):
if self.upload_log or force:
try:
self.log_uploader.upload(exclude_previous,
conf.settings.installation_id[:SHORT_ID_LEN],
log_type)
except requests.RequestException:
log.warning('Failed to upload log file')
return defer.succeed(None)
def _clean_up_temp_files(self):
for path in self.uploaded_temp_files:
try:
@@ -555,17 +529,7 @@ class Daemon(AuthJSONRPCServer):
self._clean_up_temp_files()
if self.upload_logs_on_shutdown:
try:
d = self._upload_log(
log_type="close", exclude_previous=False if self.first_run else True)
except Exception:
log.warn('Failed to upload log', exc_info=True)
d = defer.succeed(None)
else:
d = defer.succeed(None)
d.addCallback(lambda _: self._stop_server())
d = self._stop_server()
d.addErrback(log.fail(), 'Failure while shutting down')
d.addCallback(lambda _: self._stop_reflector())
d.addErrback(log.fail(), 'Failure while shutting down')
@@ -584,7 +548,6 @@ class Daemon(AuthJSONRPCServer):
'download_directory': str,
'max_upload': float,
'max_download': float,
'upload_log': bool,
'download_timeout': int,
'search_timeout': float,
'cache_time': int
@@ -620,7 +583,6 @@ class Daemon(AuthJSONRPCServer):
self.download_directory = conf.settings['download_directory']
self.max_upload = conf.settings['max_upload']
self.max_download = conf.settings['max_download']
self.upload_log = conf.settings['upload_log']
self.download_timeout = conf.settings['download_timeout']
self.search_timeout = conf.settings['search_timeout']
self.cache_time = conf.settings['cache_time']
@@ -1314,7 +1276,6 @@ class Daemon(AuthJSONRPCServer):
'download_directory': string,
'max_upload': float, 0.0 for unlimited
'max_download': float, 0.0 for unlimited
'upload_log': bool,
'search_timeout': float,
'download_timeout': int
'max_search_results': int,
@@ -1347,7 +1308,6 @@ class Daemon(AuthJSONRPCServer):
'download_directory': string,
'max_upload': float, 0.0 for unlimited
'max_download': float, 0.0 for unlimited
'upload_log': bool,
'download_timeout': int
Returns:
settings dict

@@ -57,7 +57,7 @@ def get_test_daemon(data_rate=None, generous=True, with_fee=False):
'BTCLBC': {'spot': 3.0, 'ts': util.DEFAULT_ISO_TIME + 1},
'USDBTC': {'spot': 2.0, 'ts': util.DEFAULT_ISO_TIME + 2}
}
daemon = LBRYDaemon(None, None, upload_logs_on_shutdown=False)
daemon = LBRYDaemon(None, None)
daemon.session = mock.Mock(spec=Session.Session)
daemon.exchange_rate_manager = ExchangeRateManager.DummyExchangeRateManager(rates)
base_prm = PaymentRateManager.BasePaymentRateManager(rate=data_rate)