log cleanup

This commit is contained in:
Job Evers-Meltzer 2016-10-21 16:12:38 -07:00
parent 2ee6fac014
commit 40437e4a82
3 changed files with 67 additions and 45 deletions

View file

@@ -1,11 +1,15 @@
import datetime
import json
import logging
import logging.handlers
import os
import platform
import sys
import traceback
import appdirs
import base58
import requests
from requests_futures.sessions import FuturesSession
import lbrynet
@@ -219,7 +223,7 @@ def get_log_file():
except OSError:
pass
lbrynet_log = os.path.join(log_dir, conf.LOG_FILE_NAME)
lbrynet_log = os.path.join(log_dir, settings.LOG_FILE_NAME)
return lbrynet_log
@@ -250,3 +254,37 @@ def get_parent(logger_name):
return ''
names = names[:-1]
return '.'.join(names)
class LogUploader(object):
    """Uploads the contents of a log file to the LOG_POST_URL endpoint.

    Records the log's size at creation time so that a later upload can
    optionally skip everything that was written before this process
    started (see ``exclude_previous``).
    """

    def __init__(self, log_name, log_file, log_size):
        # tag used to identify this log in the upload (e.g. 'lbrynet')
        self.log_name = log_name
        # path to the log file on disk
        self.log_file = log_file
        # size of the log when this uploader was created; the offset used
        # by upload() to exclude pre-existing content
        self.log_size = log_size

    def upload(self, exclude_previous, lbryid, log_type):
        """POST the log contents to settings.LOG_POST_URL.

        exclude_previous: if True, only upload content appended after this
            uploader was created (seek past self.log_size first).
        lbryid: node id; a base58-encoded 20-char prefix is sent as 'hash'.
        log_type: optional suffix appended to the log name in the 'type'
            field; when falsy, the bare log name is sent.

        Silently returns if the log file does not exist.
        """
        if not os.path.isfile(self.log_file):
            return
        with open(self.log_file) as f:
            if exclude_previous:
                # skip everything that was already in the log at load time
                f.seek(self.log_size)
            log_contents = f.read()
        params = {
            'date': datetime.datetime.utcnow().strftime('%Y%m%d-%H%M%S'),
            'hash': base58.b58encode(lbryid)[:20],
            'sys': platform.system(),
            'type': "%s-%s" % (self.log_name, log_type) if log_type else self.log_name,
            'log': log_contents
        }
        requests.post(settings.LOG_POST_URL, params)

    @classmethod
    def load(cls, log_name, log_file):
        """Create an uploader, recording the log file's current size.

        Uses os.path.getsize rather than reading the whole file into
        memory just to measure it; a missing file counts as size 0.
        """
        if os.path.isfile(log_file):
            log_size = os.path.getsize(log_file)
        else:
            log_size = 0
        return cls(log_name, log_file, log_size)

View file

@@ -56,26 +56,23 @@ from lbrynet.lbrynet_daemon.ExchangeRateManager import ExchangeRateManager
from lbrynet.lbrynet_daemon.Lighthouse import LighthouseClient
from lbrynet.lbrynet_daemon.auth.server import AuthJSONRPCServer
from lbrynet.metadata.Metadata import Metadata, verify_name_characters
from lbrynet.core import log_support
from lbrynet.core import utils
from lbrynet.core.utils import generate_id
from lbrynet.lbrynet_console.Settings import Settings
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob, BlobStreamDescriptorReader
from lbrynet.core.Session import Session
from lbrynet.core.PTCWallet import PTCWallet
from lbrynet.core.Wallet import LBRYcrdWallet, LBRYumWallet
from lbrynet.lbryfilemanager.EncryptedFileManager import EncryptedFileManager
from lbrynet.lbryfile.EncryptedFileMetadataManager import DBEncryptedFileMetadataManager, TempEncryptedFileMetadataManager
from lbrynet import reflector
# TODO: this code snippet is everywhere. Make it go away
if sys.platform != "darwin":
log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
else:
log_dir = user_data_dir("LBRY")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
lbrynet_log = os.path.join(log_dir, lbrynet_settings.LOG_FILE_NAME)
log = logging.getLogger(__name__)
if os.path.isfile(lbrynet_log):
with open(lbrynet_log, 'r') as f:
PREVIOUS_NET_LOG = len(f.read())
else:
PREVIOUS_NET_LOG = 0
INITIALIZING_CODE = 'initializing'
LOADING_DB_CODE = 'loading_db'
@@ -275,12 +272,16 @@ class Daemon(AuthJSONRPCServer):
self.ui_version = None
self.ip = None
self.first_run = None
self.log_file = lbrynet_log
self.log_file = log_support.get_log_file()
self.current_db_revision = 1
self.session = None
self.first_run_after_update = False
self.uploaded_temp_files = []
self._session_id = base58.b58encode(generate_id())
# TODO: this should probably be passed into the daemon, or
# possibly have the entire log upload functionality taken out
# of the daemon, but I don't want to deal with that now
self.log_uploader = log_support.LogUploader.load('lbrynet', log_support.get_log_file())
self.analytics_manager = None
self.lbryid = PENDING_LBRY_ID
@@ -324,7 +325,6 @@ class Daemon(AuthJSONRPCServer):
self.lbry_file_metadata_manager = None
self.lbry_file_manager = None
@AuthJSONRPCServer.subhandler
def _exclude_lbrycrd_only_commands_from_lbryum_session(self, request):
request.content.seek(0, 0)
@@ -400,7 +400,6 @@ class Daemon(AuthJSONRPCServer):
self.exchange_rate_manager.start()
d = defer.Deferred()
if lbrynet_settings.host_ui:
self.lbry_ui_manager.update_checker.start(1800, now=False)
d.addCallback(lambda _: self.lbry_ui_manager.setup())
@@ -621,37 +620,20 @@ class Daemon(AuthJSONRPCServer):
ds = []
for handler in query_handlers:
ds.append(self.settings.get_query_handler_status(handler.get_primary_query_identifier()))
query_id = handler.get_primary_query_identifier()
ds.append(self.settings.get_query_handler_status(query_id))
dl = defer.DeferredList(ds)
dl.addCallback(_set_query_handlers)
return dl
def _upload_log(self, log_type=None, exclude_previous=False, force=False):
if self.upload_log or force:
for lm, lp in [('lbrynet', lbrynet_log)]:
if os.path.isfile(lp):
if exclude_previous:
with open( lp, "r") as f:
f.seek(PREVIOUS_NET_LOG)
log_contents = f.read()
else:
with open(lp, "r") as f:
log_contents = f.read()
if self.lbryid is not PENDING_LBRY_ID:
id_hash = base58.b58encode(self.lbryid)[:20]
else:
id_hash = self.lbryid
params = {
'date': datetime.utcnow().strftime('%Y%m%d-%H%M%S'),
'hash': id_hash,
'sys': platform.system(),
'type': "%s-%s" % (lm, log_type) if log_type else lm,
'log': log_contents
}
requests.post(lbrynet_settings.LOG_POST_URL, params)
return defer.succeed(None)
else:
return defer.succeed(None)
if self.lbryid is not PENDING_LBRY_ID:
id_hash = base58.b58encode(self.lbryid)[:20]
else:
id_hash = self.lbryid
self.log_uploader.upload(exclude_previous, self.lbryid, log_type)
return defer.succeed(None)
def _clean_up_temp_files(self):
for path in self.uploaded_temp_files:
@@ -2187,7 +2169,8 @@ class Daemon(AuthJSONRPCServer):
check_require = True
if 'path' in p:
d = self.lbry_ui_manager.setup(user_specified=p['path'], check_requirements=check_require)
d = self.lbry_ui_manager.setup(
user_specified=p['path'], check_requirements=check_require)
elif 'branch' in p:
d = self.lbry_ui_manager.setup(branch=p['branch'], check_requirements=check_require)
else:

View file

@@ -1,5 +1,6 @@
import argparse
import logging.handlers
import os
import webbrowser
from twisted.web import server, guard