# lbry-sdk/lbrynet/extras/daemon/analytics.py
import collections
import logging
2018-05-05 05:50:42 +02:00
import asyncio
import aiohttp
2017-04-26 20:15:38 +02:00
from lbrynet import conf, utils
from lbrynet.extras import system_info
from lbrynet.extras.daemon.storage import looping_call
2017-04-26 20:15:38 +02:00
# Things We Track -- event names submitted to the analytics endpoint.
SERVER_STARTUP = 'Server Startup'
SERVER_STARTUP_SUCCESS = 'Server Startup Success'
SERVER_STARTUP_ERROR = 'Server Startup Error'
DOWNLOAD_STARTED = 'Download Started'
DOWNLOAD_ERRORED = 'Download Errored'
DOWNLOAD_FINISHED = 'Download Finished'
HEARTBEAT = 'Heartbeat'
CLAIM_ACTION = 'Claim Action'  # publish/create/update/abandon
NEW_CHANNEL = 'New Channel'
CREDITS_SENT = 'Credits Sent'
NEW_DOWNLOAD_STAT = 'Download'
UPNP_SETUP = "UPnP Setup"

# Metric accumulated via add_observation() and flushed periodically.
BLOB_BYTES_UPLOADED = 'Blob Bytes Uploaded'

log = logging.getLogger(__name__)
class Manager:
    """Shapes analytics events and submits them through the wrapped ``Api``.

    Also accumulates simple numeric metrics (see ``add_observation``) and runs
    the periodic heartbeat / metric-flush loops between ``start`` and
    ``shutdown``.
    """

    def __init__(self, analytics_api, context=None, installation_id=None, session_id=None):
        self.analytics_api = analytics_api
        # metric name -> list of observed values; flushed by summarize_and_reset()
        self._tracked_data = collections.defaultdict(list)
        # looping-call name -> asyncio.Task (set to None once cancelled)
        self.looping_tasks = {}
        self.context = context or self._make_context(
            system_info.get_platform(), conf.settings['wallet'])
        self.installation_id = installation_id or conf.settings.installation_id
        self.session_id = session_id or conf.settings.get_session_id()
        self.is_started = False

    @classmethod
    def new_instance(cls, enabled=None):
        """Build a Manager around an Api configured from global settings."""
        api = Api.new_instance(enabled)
        return cls(api)

    # Things We Track

    async def send_new_download_start(self, download_id, name, claim_dict):
        await self._send_new_download_stats("start", download_id, name, claim_dict)

    async def send_new_download_success(self, download_id, name, claim_dict):
        await self._send_new_download_stats("success", download_id, name, claim_dict)

    async def send_new_download_fail(self, download_id, name, claim_dict, e):
        await self._send_new_download_stats("failure", download_id, name, claim_dict, {
            'name': self._error_name(e),
            'message': str(e),
        })

    async def _send_new_download_stats(self, action, download_id, name, claim_dict, e=None):
        # Use the shared event envelope (_event) instead of rebuilding the
        # userId/context/timestamp wrapper inline.
        await self.analytics_api.track(self._event(NEW_DOWNLOAD_STAT, {
            'download_id': download_id,
            'name': name,
            'sd_hash': None if not claim_dict else claim_dict.source_hash.decode(),
            'action': action,
            'error': e,
        }))

    async def send_upnp_setup_success_fail(self, success, status):
        await self.analytics_api.track(
            self._event(UPNP_SETUP, {
                'success': success,
                'status': status,
            })
        )

    async def send_server_startup(self):
        await self.analytics_api.track(self._event(SERVER_STARTUP))

    async def send_server_startup_success(self):
        await self.analytics_api.track(self._event(SERVER_STARTUP_SUCCESS))

    async def send_server_startup_error(self, message):
        await self.analytics_api.track(self._event(SERVER_STARTUP_ERROR, {'message': message}))

    async def send_download_started(self, id_, name, claim_dict=None):
        await self.analytics_api.track(
            self._event(DOWNLOAD_STARTED, self._download_properties(id_, name, claim_dict))
        )

    async def send_download_errored(self, err, id_, name, claim_dict, report):
        download_error_properties = self._download_error_properties(err, id_, name, claim_dict,
                                                                    report)
        await self.analytics_api.track(self._event(DOWNLOAD_ERRORED, download_error_properties))

    async def send_download_finished(self, id_, name, report, claim_dict=None):
        download_properties = self._download_properties(id_, name, claim_dict, report)
        await self.analytics_api.track(self._event(DOWNLOAD_FINISHED, download_properties))

    async def send_claim_action(self, action):
        await self.analytics_api.track(self._event(CLAIM_ACTION, {'action': action}))

    async def send_new_channel(self):
        await self.analytics_api.track(self._event(NEW_CHANNEL))

    async def send_credits_sent(self):
        await self.analytics_api.track(self._event(CREDITS_SENT))

    async def _send_heartbeat(self):
        await self.analytics_api.track(self._event(HEARTBEAT))

    async def _update_tracked_metrics(self):
        # Flush the accumulated upload-bytes metric, if any values were observed.
        should_send, value = self.summarize_and_reset(BLOB_BYTES_UPLOADED)
        if should_send:
            await self.analytics_api.track(self._metric_event(BLOB_BYTES_UPLOADED, value))

    # Setup / Shutdown

    def start(self):
        """Schedule the periodic looping calls (idempotent)."""
        if not self.is_started:
            for name, fn, secs in self._get_looping_calls():
                self.looping_tasks[name] = asyncio.create_task(looping_call(secs, fn))
            self.is_started = True
            log.info("Start")

    def shutdown(self):
        """Cancel the looping tasks started by ``start`` (best effort)."""
        if self.is_started:
            try:
                for name, task in self.looping_tasks.items():
                    if task:
                        task.cancel()
                        self.looping_tasks[name] = None
                log.info("Stopped analytics looping calls")
                self.is_started = False
            except Exception:
                # log.exception already records the active exception/traceback;
                # passing exc_info explicitly was redundant.
                log.exception('Got exception when trying to cancel tasks in analytics')

    def _get_looping_calls(self) -> list:
        # (task name, coroutine function, interval in seconds)
        return [
            ('send_heartbeat', self._send_heartbeat, 300),
            ('update_tracked_metrics', self._update_tracked_metrics, 600),
        ]

    def add_observation(self, metric, value):
        """Record one observed value for `metric`."""
        self._tracked_data[metric].append(value)

    def summarize_and_reset(self, metric, op=sum):
        """Apply `op` on the current values for `metric`.

        This operation also resets the metric.

        Returns:
            a tuple (should_send, value)
        """
        try:
            values = self._tracked_data.pop(metric)
            return True, op(values)
        except KeyError:
            # no observations recorded since the last flush
            return False, None

    def _event(self, event, event_properties=None):
        """Wrap `event_properties` in the standard segment.com track envelope."""
        return {
            'userId': 'lbry',  # required, see https://segment.com/docs/sources/server/http/#track
            'event': event,
            'properties': self._event_properties(event_properties),
            'context': self.context,
            'timestamp': utils.isonow()
        }

    def _metric_event(self, metric_name, value):
        return self._event(metric_name, {'value': value})

    def _event_properties(self, event_properties=None):
        # lbry_id/session_id are attached to every event; caller properties win
        # on key collision.
        properties = {
            'lbry_id': self.installation_id,
            'session_id': self.session_id,
        }
        properties.update(event_properties or {})
        return properties

    @staticmethod
    def _error_name(err):
        # Prefer the exception class name; fall back to the repr of the type.
        # Shared by send_new_download_fail and _download_error_properties.
        if not hasattr(type(err), "__name__"):
            return str(type(err))
        return type(err).__name__

    @staticmethod
    def _download_properties(id_, name, claim_dict=None, report=None):
        sd_hash = None if not claim_dict else claim_dict.source_hash.decode()
        p = {
            'download_id': id_,
            'name': name,
            'stream_info': sd_hash
        }
        if report:
            p['report'] = report
        return p

    @staticmethod
    def _download_error_properties(error, id_, name, claim_dict, report):
        return {
            'download_id': id_,
            'name': name,
            'stream_info': claim_dict.source_hash.decode(),
            'error': Manager._error_name(error),
            'reason': str(error),
            'report': report
        }

    @staticmethod
    def _make_context(platform, wallet):
        # see https://segment.com/docs/spec/common/#context
        # they say they'll ignore fields outside the spec, but evidently they don't
        # NOTE(review): `wallet` is accepted but unused here -- kept for
        # call-site compatibility.
        context = {
            'app': {
                'version': platform['lbrynet_version'],
                'build': platform['build'],
            },
            # TODO: expand os info to give linux/osx specific info
            'os': {
                'name': platform['os_system'],
                'version': platform['os_release']
            },
        }
        if 'desktop' in platform and 'distro' in platform:
            context['os']['desktop'] = platform['desktop']
            context['os']['distro'] = platform['distro']
        return context
2017-04-26 20:15:38 +02:00
class Api:
    """Thin async HTTP client for the (segment.com-compatible) analytics endpoint."""

    def __init__(self, cookies, url, write_key, enabled):
        self.cookies = cookies
        self.url = url
        self._write_key = write_key
        self._enabled = enabled

    async def _post(self, endpoint, data):
        """POST `data` as JSON to `endpoint`; failures are logged, never raised.

        Raises:
            ValueError: if `endpoint` does not start with '/'.
        """
        # there is an issue with a timing condition with keep-alive
        # that is best explained here: https://github.com/mikem23/keepalive-race
        #
        # If you make a request, wait just the right amount of time,
        # then make another request, the requests module may opt to
        # reuse the connection, but by the time the server gets it the
        # timeout will have expired.
        #
        # by forcing the connection to close, we will disable the keep-alive.

        # Explicit validation instead of `assert`: asserts are stripped under
        # `python -O`, and indexing endpoint[0] raised IndexError on ''.
        if not endpoint.startswith('/'):
            raise ValueError("endpoint must start with '/': {!r}".format(endpoint))
        request_kwargs = {
            'method': 'POST',
            'url': self.url + endpoint,
            'headers': {'Connection': 'Close'},
            'auth': aiohttp.BasicAuth(self._write_key, ''),
            'json': data,
            'cookies': self.cookies
        }
        try:
            async with aiohttp.request(**request_kwargs) as response:
                # keep any session cookies the endpoint hands back
                self.cookies.update(response.cookies)
        except Exception:
            # log.exception already records the active exception/traceback;
            # passing exc_info explicitly was redundant.
            log.exception('Encountered an exception while POSTing to %s', self.url + endpoint)

    async def track(self, event):
        """Send a single tracking event"""
        if not self._enabled:
            return 'Analytics disabled'
        log.debug('Sending track event: %s', event)
        await self._post('/track', event)

    @classmethod
    def new_instance(cls, enabled=None):
        """Initialize an instance using values from the configuration"""
        if enabled is None:
            enabled = conf.settings['share_usage_data']
        return cls(
            {},  # cookie jar, filled in from responses
            conf.settings['ANALYTICS_ENDPOINT'],
            utils.deobfuscate(conf.settings['ANALYTICS_TOKEN']),
            enabled,
        )