track more events

Alex Grintsvayg 2017-04-26 20:02:00 -04:00
parent 8afe4d1af8
commit 15e89650ff
3 changed files with 34 additions and 17 deletions

View file

@@ -15,6 +15,9 @@ DOWNLOAD_STARTED = 'Download Started'
 DOWNLOAD_ERRORED = 'Download Errored'
 DOWNLOAD_FINISHED = 'Download Finished'
 HEARTBEAT = 'Heartbeat'
+CLAIM_ACTION = 'Claim Action'  # publish/create/update/abandon
+NEW_CHANNEL = 'New Channel'
+CREDITS_SENT = 'Credits Sent'
 BLOB_BYTES_UPLOADED = 'Blob Bytes Uploaded'
 BLOB_BYTES_AVAILABLE = 'Blob Bytes Available'
@@ -64,6 +67,15 @@ class Manager(object):
             self._event(DOWNLOAD_FINISHED, self._download_properties(id_, name, stream_info))
         )
 
+    def send_claim_action(self, action):
+        self.analytics_api.track(self._event(CLAIM_ACTION, {'action': action}))
+
+    def send_new_channel(self):
+        self.analytics_api.track(self._event(NEW_CHANNEL))
+
+    def send_credits_sent(self):
+        self.analytics_api.track(self._event(CREDITS_SENT))
+
     def _send_heartbeat(self):
         self.analytics_api.track(self._event(HEARTBEAT))
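The three new Manager helpers above all funnel through analytics_api.track(). A rough usage sketch, assuming a constructed Manager instance named analytics_manager (the same attribute the Daemon uses further down):

    # Hypothetical calls, mirroring how the Daemon invokes the new helpers.
    analytics_manager.send_claim_action('publish')  # also 'abandon', 'new_support', ...
    analytics_manager.send_new_channel()            # fires the 'New Channel' event
    analytics_manager.send_credits_sent()           # fires the 'Credits Sent' event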
@@ -236,7 +248,7 @@ class Api(object):
         """Initialize an instance using values from the configuration"""
         session = Session()
         if enabled is None:
-            enabled = conf.settings['share_debug_info']
+            enabled = conf.settings['share_usage_data']
         return cls(
             session,
             conf.settings['ANALYTICS_ENDPOINT'],

View file

@@ -199,7 +199,7 @@ ADJUSTABLE_SETTINGS = {
     'run_on_startup': (bool, False),
     'run_reflector_server': (bool, False),
     'sd_download_timeout': (int, 3),
-    'share_debug_info': (bool, True),  # whether to share diagnostic info with LBRY
+    'share_usage_data': (bool, True),  # whether to share usage stats and diagnostic info with LBRY
     'peer_search_timeout': (int, 3),
     'search_servers': (list, ['lighthouse1.lbry.io:50005']),
     'search_timeout': (float, 5.0),
@@ -392,8 +392,11 @@ class Config(object):
         if 'startup_scripts' in settings_dict:
             del settings_dict['startup_scripts']
         if 'upload_log' in settings_dict:
-            settings_dict['share_debug_info'] = settings_dict['upload_log']
+            settings_dict['share_usage_data'] = settings_dict['upload_log']
             del settings_dict['upload_log']
+        if 'share_debug_info' in settings_dict:
+            settings_dict['share_usage_data'] = settings_dict['share_debug_info']
+            del settings_dict['share_debug_info']
         for key in settings_dict.keys():
             if not self._is_valid_setting(key):
                 log.warning('Ignoring invalid conf file setting: %s', key)
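A minimal sketch of how the two legacy keys now fold into 'share_usage_data' when an old conf file is loaded; the settings_dict contents here are hypothetical, and note that 'share_debug_info' is migrated second, so it wins if both legacy keys are present:

    # Hypothetical old-style settings loaded from disk.
    settings_dict = {'upload_log': True, 'share_debug_info': False}

    if 'upload_log' in settings_dict:
        settings_dict['share_usage_data'] = settings_dict['upload_log']
        del settings_dict['upload_log']
    if 'share_debug_info' in settings_dict:
        settings_dict['share_usage_data'] = settings_dict['share_debug_info']
        del settings_dict['share_debug_info']

    assert settings_dict == {'share_usage_data': False}  # the later migration wins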

View file

@@ -441,7 +441,7 @@ class Daemon(AuthJSONRPCServer):
             'download_timeout': int,
             'search_timeout': float,
             'cache_time': int,
-            'share_debug_info': bool,
+            'share_usage_data': bool,
         }
 
         def can_update_key(settings, key, setting_type):
@@ -698,6 +698,7 @@ class Daemon(AuthJSONRPCServer):
         d = reupload.reflect_stream(publisher.lbry_file)
         d.addCallbacks(lambda _: log.info("Reflected new publication to lbry://%s", name),
                        log.exception)
+        self.analytics_manager.send_claim_action('publish')
         log.info("Success! Published to lbry://%s txid: %s nout: %d", name, claim_out['txid'],
                  claim_out['nout'])
         defer.returnValue(claim_out)
@@ -1710,6 +1711,7 @@ class Daemon(AuthJSONRPCServer):
             raise InsufficientFundsError()
         result = yield self.session.wallet.claim_new_channel(channel_name, amount)
+        self.analytics_manager.send_new_channel()
         log.info("Claimed a new channel! Result: %s", result)
         response = yield self._render_response(result)
         defer.returnValue(response)
@@ -1824,20 +1826,18 @@ class Daemon(AuthJSONRPCServer):
         # original format {'currency':{'address','amount'}}
         # add address to fee if unspecified {'version': ,'currency', 'address' , 'amount'}
         if 'fee' in metadata:
-            new_fee_dict = {}
             assert len(metadata['fee']) == 1, "Too many fees"
             currency, fee_dict = metadata['fee'].items()[0]
             if 'address' not in fee_dict:
                 address = yield self.session.wallet.get_new_address()
             else:
                 address = fee_dict['address']
-            new_fee_dict = {
+            metadata['fee'] = {
                 'version': '_0_0_1',
                 'currency': currency,
                 'address': address,
                 'amount': fee_dict['amount']
             }
-            metadata['fee'] = new_fee_dict
 
         log.info("Publish: %s", {
             'name': name,
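To illustrate the fee normalization above: a one-currency fee entry in the old format is rewritten in place into the versioned schema. The values below are hypothetical, and the address would come from wallet.get_new_address() when none is supplied (the diff itself uses Python 2's .items()[0]; list() is added here so the sketch also runs on Python 3):

    # Hypothetical metadata carrying the old {'currency': {'amount', 'address'}} fee format.
    metadata = {'fee': {'LBC': {'amount': 1.0, 'address': 'bExampleAddress'}}}

    currency, fee_dict = list(metadata['fee'].items())[0]
    metadata['fee'] = {
        'version': '_0_0_1',
        'currency': currency,
        'address': fee_dict['address'],  # else: fetched from the wallet
        'amount': fee_dict['amount'],
    }
    # metadata['fee'] == {'version': '_0_0_1', 'currency': 'LBC',
    #                     'address': 'bExampleAddress', 'amount': 1.0}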
@@ -1900,6 +1900,7 @@ class Daemon(AuthJSONRPCServer):
         try:
             abandon_claim_tx = yield self.session.wallet.abandon_claim(claim_id)
+            self.analytics_manager.send_claim_action('abandon')
             response = yield self._render_response(abandon_claim_tx)
         except BaseException as err:
             log.warning(err)
@@ -1913,10 +1914,9 @@ class Daemon(AuthJSONRPCServer):
     @AuthJSONRPCServer.auth_required
     def jsonrpc_abandon_name(self, **kwargs):
         """
-        DEPRECATED, use abandon_claim
+        DEPRECATED. Use `claim_abandon` instead
         """
-        return self.jsonrpc_abandon_claim(**kwargs)
+        return self.jsonrpc_claim_abandon(**kwargs)
 
     @AuthJSONRPCServer.auth_required
     def jsonrpc_support_claim(self, **kwargs):
@@ -1926,6 +1926,7 @@ class Daemon(AuthJSONRPCServer):
         return self.jsonrpc_claim_new_support(**kwargs)
 
     @AuthJSONRPCServer.auth_required
+    @defer.inlineCallbacks
     def jsonrpc_claim_new_support(self, name, claim_id, amount):
         """
         Support a name claim
@@ -1943,9 +1944,9 @@ class Daemon(AuthJSONRPCServer):
         }
         """
-        d = self.session.wallet.support_claim(name, claim_id, amount)
-        d.addCallback(lambda r: self._render_response(r))
-        return d
+        result = yield self.session.wallet.support_claim(name, claim_id, amount)
+        self.analytics_manager.send_claim_action('new_support')
+        defer.returnValue(result)
 
     # TODO: merge this into claim_list
     @AuthJSONRPCServer.auth_required
@@ -2203,6 +2204,7 @@ class Daemon(AuthJSONRPCServer):
         return d
 
     @AuthJSONRPCServer.auth_required
+    @defer.inlineCallbacks
     def jsonrpc_send_amount_to_address(self, amount, address):
         """
         Send credits to an address
@@ -2216,10 +2218,10 @@ class Daemon(AuthJSONRPCServer):
         reserved_points = self.session.wallet.reserve_points(address, amount)
         if reserved_points is None:
-            return defer.fail(InsufficientFundsError())
-        d = self.session.wallet.send_points_to_address(reserved_points, amount)
-        d.addCallback(lambda _: self._render_response(True))
-        return d
+            raise InsufficientFundsError()
+        yield self.session.wallet.send_points_to_address(reserved_points, amount)
+        self.analytics_manager.send_credits_sent()
+        defer.returnValue(True)
 
     def jsonrpc_get_block(self, **kwargs):
         """