updates from master, more refactoring

- lbrynet.lbrynet_daemon.auth.client.LBRYAPIClient.config will detect whether
it needs to return the auth or the non-auth version of the client
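
A minimal sketch of the intended call pattern, based on the client.py and lbrynet-cli changes below (illustrative only; daemon_status is just one example method and the daemon must already be running):

    from lbrynet.lbrynet_daemon.auth.client import LBRYAPIClient

    # LBRYAPIClient.config() checks the use_auth_http setting and hands back
    # either an authenticated AuthAPIClient or a plain JSONRPCProxy
    api = LBRYAPIClient.config()
    print api.daemon_status()

    # a single method call can also be routed straight through config()
    print LBRYAPIClient.config(service='daemon_status', params={})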
Jack 2016-10-26 03:16:33 -04:00
commit 1951ea09cd
30 changed files with 451 additions and 305 deletions

View file

@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.6.7
current_version = 0.6.9
commit = True
tag = True

View file

@ -1,7 +1,7 @@
import logging
from conf import Config
__version__ = "0.6.7"
__version__ = "0.6.9"
version = tuple(__version__.split('.'))
settings = Config()
logging.getLogger(__name__).addHandler(logging.NullHandler())

View file

@ -3,7 +3,6 @@ import os
import sys
from appdirs import user_data_dir
PRIORITIZE_ENV = True
LINUX = 1
DARWIN = 2
WINDOWS = 3
@ -25,134 +24,195 @@ else:
default_data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
default_lbryum_dir = os.path.join(os.path.expanduser("~"), ".lbryum")
ADJUSTABLE_SETTINGS = {
'run_on_startup': False,
'download_directory': default_download_directory,
'max_upload': 0.0,
'max_download': 0.0,
'upload_log': True,
'delete_blobs_on_remove': True,
'use_upnp': True,
'start_lbrycrdd': True,
'run_reflector_server': False,
'startup_scripts': [],
'last_version': {},
'peer_port': 3333,
'dht_node_port': 4444,
'reflector_port': 5566,
'download_timeout': 30,
'max_search_results': 25,
'search_timeout': 3.0,
'cache_time': 150,
'host_ui': True,
'check_ui_requirements': True,
'local_ui_path': False,
'API_PORT': 5279,
'search_servers':['lighthouse1.lbry.io:50005'],
'data_rate': .0001, # points/megabyte
'MIN_BLOB_INFO_PAYMENT_RATE': .02, # points/1000 infos
'MIN_VALUABLE_BLOB_INFO_PAYMENT_RATE': .05, # points/1000 infos
'MIN_VALUABLE_BLOB_HASH_PAYMENT_RATE': .05, # points/1000 infos
'max_connections_per_stream': 5,
'known_dht_nodes': [('104.236.42.182', 4000),
('lbrynet1.lbry.io', 4444),
('lbrynet2.lbry.io', 4444),
('lbrynet3.lbry.io', 4444)],
'POINTTRADER_SERVER': 'http://127.0.0.1:2424',
'REFLECTOR_SERVERS': [("reflector.lbry.io", 5566)],
'WALLET': "lbryum",
'UI_BRANCH': "master",
'DEFAULT_UI_BRANCH': 'master',
'DATA_DIR': default_data_dir,
'LBRYUM_WALLET_DIR': default_lbryum_dir,
'USE_AUTH_HTTP': False,
'sd_download_timeout': 3,
'max_key_fee': {'USD': {'amount': 25.0, 'address': ''}}
}
def convert_setting(setting, current_val):
new_type = setting.__class__
current_type = current_val.__class__
if current_type is bool:
if new_type is bool:
return setting
elif str(setting).lower() == "false":
return False
elif str(setting).lower() == "true":
return True
else:
raise ValueError
elif current_type is int:
return int(setting)
elif current_type is float:
return float(setting)
elif current_type is str:
return str(setting)
elif current_type is dict:
return dict(setting)
elif current_type is list:
return list(setting)
elif current_type is tuple:
return tuple(setting)
else:
raise ValueError()
class ApplicationSettings(object):
MAX_HANDSHAKE_SIZE = 2**16
MAX_REQUEST_SIZE = 2**16
MAX_BLOB_REQUEST_SIZE = 2**16
MAX_RESPONSE_INFO_SIZE = 2**16
MAX_BLOB_INFOS_TO_REQUEST = 20
BLOBFILES_DIR = "blobfiles"
BLOB_SIZE = 2**21
LOG_FILE_NAME = "lbrynet.log"
LOG_POST_URL = "https://lbry.io/log-upload"
CRYPTSD_FILE_EXTENSION = ".cryptsd"
API_INTERFACE = "localhost"
API_ADDRESS = "lbryapi"
ICON_PATH = "icons" if platform is WINDOWS else "app.icns"
APP_NAME = "LBRY"
PROTOCOL_PREFIX = "lbry"
WALLET_TYPES = ["lbryum", "lbrycrd"]
SOURCE_TYPES = ['lbry_sd_hash', 'url', 'btih']
CURRENCIES = {
'BTC': {'type': 'crypto'},
'LBC': {'type': 'crypto'},
'USD': {'type': 'fiat'},
}
LOGGLY_TOKEN = 'LJEzATH4AzRgAwxjAP00LwZ2YGx3MwVgZTMuBQZ3MQuxLmOv'
ANALYTICS_ENDPOINT = 'https://api.segment.io/v1'
ANALYTICS_TOKEN = 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H='
@staticmethod
def get_dict():
r = {k: v for k, v in ApplicationSettings.__dict__.iteritems() if not k.startswith('__')}
if PRIORITIZE_ENV:
r = add_env_settings(r)
return r
def convert_env_setting(setting, value):
env_val = os.environ.get(setting, value)
return convert_setting(env_val, value)
def add_env_settings(settings_dict):
with_env_settings = copy.deepcopy(settings_dict)
for setting, setting_val in settings_dict.iteritems():
env_val = os.environ.get(setting, None)
if env_val != setting_val and env_val is not None:
with_env_settings.update({setting: env_val})
return with_env_settings
def get_env_settings(settings):
for setting in settings:
yield convert_env_setting(setting, settings[setting])
DEFAULT_CONFIG = ApplicationSettings.get_dict()
DEFAULT_CONFIG.update(add_env_settings(ADJUSTABLE_SETTINGS))
def add_env_settings_to_dict(settings_dict):
for setting, env_setting in zip(settings_dict, get_env_settings(settings_dict)):
settings_dict.update({setting: env_setting})
return settings_dict
class Config(object):
__shared_state = copy.deepcopy(DEFAULT_CONFIG)
class Setting(object):
__fixed = []
__excluded = ['get_dict', 'update']
def __iter__(self):
for k in self.__dict__.iterkeys():
if k.startswith('_') or k in self.__excluded:
continue
yield k
def __getitem__(self, item):
assert item in self, IndexError
return self.__dict__[item]
def __setitem__(self, key, value):
assert key in self and key not in self.__fixed, KeyError(key)
_value = convert_setting(value, self[key])
self.__dict__.update({key: _value})
def __contains__(self, item):
return item in iter(self)
def get_dict(self):
return {k: self[k] for k in self}
def update(self, other):
for k, v in other.iteritems():
try:
self.__setitem__(k, v)
except KeyError:
pass
except AssertionError:
pass
class AdjustableSettings(Setting):
def __init__(self):
self.is_generous_host = True
self.run_on_startup = False
self.download_directory = default_download_directory
self.max_upload = 0.0
self.max_download = 0.0
self.upload_log = True
self.delete_blobs_on_remove = True
self.use_upnp = True
self.start_lbrycrdd = True
self.run_reflector_server = False
self.startup_scripts = []
self.last_version = {'lbrynet': '0.0.1', 'lbryum': '0.0.1'}
self.peer_port = 3333
self.dht_node_port = 4444
self.reflector_port = 5566
self.download_timeout = 30
self.max_search_results = 25
self.search_timeout = 3.0
self.cache_time = 150
self.host_ui = True
self.check_ui_requirements = True
self.local_ui_path = False
self.api_port = 5279
self.search_servers = ['lighthouse1.lbry.io:50005']
self.data_rate = .0001 # points/megabyte
self.min_info_rate = .02 # points/1000 infos
self.min_valuable_info_rate = .05 # points/1000 infos
self.min_valuable_hash_rate = .05 # points/1000 infos
self.max_connections_per_stream = 5
self.known_dht_nodes = [('104.236.42.182', 4000),
('lbrynet1.lbry.io', 4444),
('lbrynet2.lbry.io', 4444),
('lbrynet3.lbry.io', 4444)]
self.pointtrader_server = 'http://127.0.0.1:2424'
self.reflector_servers = [("reflector.lbry.io", 5566)]
self.wallet = "lbryum"
self.ui_branch = "master"
self.default_ui_branch = 'master'
self.data_dir = default_data_dir
self.lbryum_wallet_dir = default_lbryum_dir
self.use_auth_http = False
self.sd_download_timeout = 3
self.max_key_fee = {'USD': {'amount': 25.0, 'address': ''}}
class ApplicationSettings(Setting):
def __init__(self):
self.MAX_HANDSHAKE_SIZE = 2**16
self.MAX_REQUEST_SIZE = 2**16
self.MAX_BLOB_REQUEST_SIZE = 2**16
self.MAX_RESPONSE_INFO_SIZE = 2**16
self.MAX_BLOB_INFOS_TO_REQUEST = 20
self.BLOBFILES_DIR = "blobfiles"
self.BLOB_SIZE = 2**21
self.LOG_FILE_NAME = "lbrynet.log"
self.LOG_POST_URL = "https://lbry.io/log-upload"
self.CRYPTSD_FILE_EXTENSION = ".cryptsd"
self.API_INTERFACE = "localhost"
self.API_ADDRESS = "lbryapi"
self.ICON_PATH = "icons" if platform is WINDOWS else "app.icns"
self.APP_NAME = "LBRY"
self.PROTOCOL_PREFIX = "lbry"
self.wallet_TYPES = ["lbryum", "lbrycrd"]
self.SOURCE_TYPES = ['lbry_sd_hash', 'url', 'btih']
self.CURRENCIES = {
'BTC': {'type': 'crypto'},
'LBC': {'type': 'crypto'},
'USD': {'type': 'fiat'},
}
self.LOGGLY_TOKEN = 'LJEzATH4AzRgAwxjAP00LwZ2YGx3MwVgZTMuBQZ3MQuxLmOv'
self.ANALYTICS_ENDPOINT = 'https://api.segment.io/v1'
self.ANALYTICS_TOKEN = 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H='
APPLICATION_SETTINGS = AdjustableSettings()
ADJUSTABLE_SETTINGS = AdjustableSettings()
class DefaultSettings(ApplicationSettings, AdjustableSettings):
__fixed = APPLICATION_SETTINGS.get_dict().keys()
def __init__(self):
self.__dict__ = self.__shared_state
ApplicationSettings.__init__(self)
AdjustableSettings.__init__(self)
def update(self, settings):
for k, v in settings.iteritems():
if k in ADJUSTABLE_SETTINGS:
self.__dict__.update({k: v})
@property
def configurable_settings(self):
return {k: v for k, v in copy.deepcopy(self.__dict__).iteritems() if k in ADJUSTABLE_SETTINGS}
DEFAULT_SETTINGS = DefaultSettings()
class Config(DefaultSettings):
__shared_state = copy.deepcopy(DEFAULT_SETTINGS.get_dict())
def __init__(self):
self.__dict__ = add_env_settings_to_dict(self.__shared_state)
@property
def ORIGIN(self):
return "http://%s:%i" % (ApplicationSettings.API_INTERFACE, self.API_PORT)
return "http://%s:%i" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port)
@property
def REFERER(self):
return "http://%s:%i/" % (ApplicationSettings.API_INTERFACE, self.API_PORT)
return "http://%s:%i/" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port)
@property
def API_CONNECTION_STRING(self):
return "http://%s:%i/%s" % (ApplicationSettings.API_INTERFACE, self.API_PORT, ApplicationSettings.API_ADDRESS)
return "http://%s:%i/%s" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port, DEFAULT_SETTINGS.API_ADDRESS)
@property
def UI_ADDRESS(self):
return "http://%s:%i" % (ApplicationSettings.API_INTERFACE, self.API_PORT)
@property
def LBRYUM_WALLET_DIR(self):
env_dir = os.environ.get('LBRYUM_WALLET_DIR')
if env_dir:
return env_dir
return self.__dict__.get('LBRYUM_WALLET_DIR')
return "http://%s:%i" % (DEFAULT_SETTINGS.API_INTERFACE, self.api_port)

View file

@ -17,7 +17,7 @@ class BlobAvailabilityTracker(object):
def __init__(self, blob_manager, peer_finder, dht_node):
self.availability = {}
self.last_mean_availability = Decimal(0.0)
self._last_mean_availability = Decimal(0.0)
self._blob_manager = blob_manager
self._peer_finder = peer_finder
self._dht_node = dht_node
@ -50,6 +50,11 @@ class BlobAvailabilityTracker(object):
d.addCallback(lambda results: [val for success, val in results if success])
return d
@property
def last_mean_availability(self):
return max(Decimal(0.01), self._last_mean_availability)
def _update_peers_for_blob(self, blob):
def _save_peer_info(blob_hash, peers):
v = {blob_hash: peers}
@ -86,4 +91,4 @@ class BlobAvailabilityTracker(object):
def _get_mean_peers(self):
num_peers = [len(self.availability[blob]) for blob in self.availability]
mean = Decimal(sum(num_peers)) / Decimal(max(1, len(num_peers)))
self.last_mean_availability = mean
self._last_mean_availability = mean

View file

@ -1,9 +1,10 @@
from lbrynet.core.Strategy import get_default_strategy
from lbrynet import settings
from decimal import Decimal
class BasePaymentRateManager(object):
def __init__(self, rate=settings.data_rate, info_rate=settings.MIN_BLOB_INFO_PAYMENT_RATE):
def __init__(self, rate=settings.data_rate, info_rate=settings.min_info_rate):
self.min_blob_data_payment_rate = rate
self.min_blob_info_payment_rate = info_rate
@ -35,7 +36,7 @@ class PaymentRateManager(object):
class NegotiatedPaymentRateManager(object):
def __init__(self, base, availability_tracker, generous=True):
def __init__(self, base, availability_tracker, generous=settings.is_generous_host):
"""
@param base: a BasePaymentRateManager
@param availability_tracker: a BlobAvailabilityTracker
@ -71,4 +72,10 @@ class NegotiatedPaymentRateManager(object):
self.points_paid += amount
def record_offer_reply(self, peer, offer):
self.strategy.update_accepted_offers(peer, offer)
def price_limit_reached(self, peer):
if peer in self.strategy.pending_sent_offers:
offer = self.strategy.pending_sent_offers[peer]
return offer.is_too_low and round(Decimal.from_float(offer.rate), 5) >= round(self.strategy.max_rate, 5)
return False

View file

@ -30,9 +30,12 @@ class MeanAvailabilityWeightedPrice(object):
mean_availability = self.blob_tracker.last_mean_availability
availability = self.blob_tracker.availability.get(blob, [])
index = 0 # blob.index
price = self.base_price * (mean_availability / Decimal(max(1, len(availability)))) / self._frontload(index)
price = self.base_price * self._get_availability_multiplier(mean_availability, availability) / self._frontload(index)
return round(price, 5)
def _get_availability_multiplier(self, mean_availability, availability):
return Decimal(max(1, mean_availability) / Decimal(max(1, len(availability))))
def _frontload(self, index):
"""
Get front-load multiplier, used to weight prices of blobs in a stream towards the front of the stream.

View file

@ -29,7 +29,7 @@ class Session(object):
def __init__(self, blob_data_payment_rate, db_dir=None, lbryid=None, peer_manager=None, dht_node_port=None,
known_dht_nodes=None, peer_finder=None, hash_announcer=None, blob_dir=None, blob_manager=None,
peer_port=None, use_upnp=True, rate_limiter=None, wallet=None, dht_node_class=node.Node,
blob_tracker_class=None, payment_rate_manager_class=None):
blob_tracker_class=None, payment_rate_manager_class=None, is_generous=True):
"""
@param blob_data_payment_rate: The default payment rate for blob data
@ -109,6 +109,7 @@ class Session(object):
self.base_payment_rate_manager = BasePaymentRateManager(blob_data_payment_rate)
self.payment_rate_manager = None
self.payment_rate_manager_class = payment_rate_manager_class or NegotiatedPaymentRateManager
self.is_generous = is_generous
def setup(self):
"""Create the blob directory and database if necessary, start all desired services"""
@ -271,7 +272,8 @@ class Session(object):
self.dht_node)
if self.payment_rate_manager is None:
self.payment_rate_manager = self.payment_rate_manager_class(self.base_payment_rate_manager,
self.blob_tracker)
self.blob_tracker,
self.is_generous)
self.rate_limiter.start()
d1 = self.blob_manager.setup()

View file

@ -1,5 +1,6 @@
from zope.interface import implementer
from decimal import Decimal
from lbrynet import settings
from lbrynet.interfaces import INegotiationStrategy
from lbrynet.core.Offer import Offer
from lbrynet.core.PriceModel import MeanAvailabilityWeightedPrice
@ -15,13 +16,14 @@ class Strategy(object):
"""
implementer(INegotiationStrategy)
def __init__(self, price_model, max_rate, min_rate, is_generous=True):
def __init__(self, price_model, max_rate, min_rate, is_generous=settings.is_generous_host):
self.price_model = price_model
self.is_generous = is_generous
self.accepted_offers = {}
self.pending_sent_offers = {}
self.offers_sent = {}
self.offers_received = {}
self.max_rate = max_rate or Decimal(self.price_model.base_price * 100)
self.max_rate = max_rate or Decimal(self.price_model.base_price * 50)
self.min_rate = Decimal(min_rate)
def _make_rate_offer(self, rates, offer_count):
@ -36,13 +38,17 @@ class Strategy(object):
if peer in self.accepted_offers:
# if there was a previous accepted offer, use that
offer = self.accepted_offers[peer]
if peer in self.pending_sent_offers:
del self.pending_sent_offers[peer]
elif offer_count == 0 and self.is_generous:
# Try asking for it for free
offer = Offer(Decimal(0.0))
self.pending_sent_offers.update({peer: offer})
else:
rates = [self.price_model.calculate_price(blob) for blob in blobs]
price = self._make_rate_offer(rates, offer_count)
offer = Offer(price)
self.pending_sent_offers.update({peer: offer})
return offer
def respond_to_offer(self, offer, peer, blobs):
@ -50,7 +56,6 @@ class Strategy(object):
self._add_offer_received(peer)
rates = [self.price_model.calculate_price(blob) for blob in blobs]
price = self._get_response_rate(rates, offer_count)
if peer in self.accepted_offers:
offer = self.accepted_offers[peer]
elif offer.rate == 0.0 and offer_count == 0 and self.is_generous:
@ -71,6 +76,7 @@ class Strategy(object):
del self.accepted_offers[peer]
if offer.is_accepted:
self.accepted_offers.update({peer: offer})
self.pending_sent_offers.update({peer: offer})
def _add_offer_sent(self, peer):
turn = self.offers_sent.get(peer, 0) + 1
@ -95,7 +101,7 @@ class BasicAvailabilityWeightedStrategy(Strategy):
implementer(INegotiationStrategy)
def __init__(self, blob_tracker, acceleration=1.25, deceleration=0.9, max_rate=None, min_rate=0.0,
is_generous=True, base_price=0.0001, alpha=1.0):
is_generous=settings.is_generous_host, base_price=0.0001, alpha=1.0):
price_model = MeanAvailabilityWeightedPrice(blob_tracker, base_price=base_price, alpha=alpha)
Strategy.__init__(self, price_model, max_rate, min_rate, is_generous)
self._acceleration = Decimal(acceleration) # rate of how quickly to ramp offer

View file

@ -390,7 +390,7 @@ class Wallet(object):
def _get_my_unspent_claim(claims):
for claim in claims:
if claim['name'] == name and not claim['is spent']:
if claim['name'] == name and not claim['is spent'] and not claim.get('supported_claimid'):
return claim
return False

View file

@ -51,8 +51,10 @@ class BlobRequester(object):
self._available_blobs = defaultdict(list) # {Peer: [blob_hash]}
self._unavailable_blobs = defaultdict(list) # {Peer: [blob_hash]}}
self._protocol_prices = {} # {ClientProtocol: price}
self._protocol_offers = {}
self._price_disagreements = [] # [Peer]
self._protocol_tries = {}
self._maxed_out_peers = []
self._incompatible_peers = []
######## IRequestCreator #########
@ -84,17 +86,18 @@ class BlobRequester(object):
if availability.can_make_request():
availability.make_request_and_handle_response()
sent_request = True
if price.can_make_request():
# TODO: document why a PriceRequest is only made if an
# Availability or Download request was made
price.make_request_and_handle_response()
sent_request = True
if download.can_make_request():
try:
download.make_request_and_handle_response()
sent_request = True
except InsufficientFundsError as err:
return defer.fail(err)
if sent_request and price.can_make_request():
# TODO: document why a PriceRequest is only made if an
# Availability or Download request was made
price.make_request_and_handle_response()
return defer.succeed(sent_request)
def _get_hash_for_peer_search(self):
@ -118,7 +121,9 @@ class BlobRequester(object):
def choose_best_peers(peers):
bad_peers = self._get_bad_peers()
return [p for p in peers if not p in bad_peers]
without_bad_peers = [p for p in peers if not p in bad_peers]
without_maxed_out_peers = [p for p in without_bad_peers if p not in self._maxed_out_peers]
return without_maxed_out_peers
d.addCallback(choose_best_peers)
@ -182,6 +187,10 @@ class RequestHelper(object):
def protocol_prices(self):
return self.requestor._protocol_prices
@property
def protocol_offers(self):
return self.requestor._protocol_offers
@property
def available_blobs(self):
return self.requestor._available_blobs[self.peer]
@ -190,6 +199,10 @@ class RequestHelper(object):
def unavailable_blobs(self):
return self.requestor._unavailable_blobs[self.peer]
@property
def maxed_out_peers(self):
return self.requestor._maxed_out_peers
def update_local_score(self, score):
self.requestor._update_local_score(self.peer, score)
@ -210,10 +223,18 @@ class RequestHelper(object):
return reason
def get_and_save_rate(self):
if self.payment_rate_manager.price_limit_reached(self.peer):
if self.peer not in self.maxed_out_peers:
self.maxed_out_peers.append(self.peer)
return None
rate = self.protocol_prices.get(self.protocol)
if rate is None:
if self.peer in self.payment_rate_manager.strategy.pending_sent_offers:
pending = self.payment_rate_manager.strategy.pending_sent_offers[self.peer]
if not pending.is_too_low and not pending.is_accepted:
return pending.rate
rate = self.payment_rate_manager.get_rate_blob_data(self.peer, self.available_blobs)
self.protocol_prices[self.protocol] = rate
self.protocol_offers[self.protocol] = rate
return rate
@ -337,7 +358,9 @@ class AvailabilityRequest(RequestHelper):
class PriceRequest(RequestHelper):
"""Ask a peer if a certain price is acceptable"""
def can_make_request(self):
return self.get_and_save_rate() is not None
if len(self.available_blobs) and not self.protocol in self.protocol_prices:
return self.get_and_save_rate() is not None
return False
def make_request_and_handle_response(self):
request = self._get_price_request()
@ -362,22 +385,19 @@ class PriceRequest(RequestHelper):
assert request.response_identifier == 'blob_data_payment_rate'
if 'blob_data_payment_rate' not in response_dict:
return InvalidResponseError("response identifier not in response")
assert self.protocol in self.protocol_prices
rate = self.protocol_prices[self.protocol]
offer = Offer(rate)
assert self.protocol in self.protocol_offers
offer = Offer(self.protocol_offers[self.protocol])
offer.handle(response_dict['blob_data_payment_rate'])
self.payment_rate_manager.record_offer_reply(self.peer.host, offer)
self.payment_rate_manager.record_offer_reply(self.peer, offer)
if offer.is_accepted:
log.debug("Offered rate %f/mb accepted by %s", rate, str(self.peer.host))
log.info("Offered rate %f/mb accepted by %s", offer.rate, self.peer.host)
self.protocol_prices[self.protocol] = offer.rate
return True
elif offer.is_too_low:
log.debug("Offered rate %f/mb rejected by %s", rate, str(self.peer.host))
del self.protocol_prices[self.protocol]
return True
log.debug("Offered rate %f/mb rejected by %s", offer.rate, self.peer.host)
return not self.payment_rate_manager.price_limit_reached(self.peer)
else:
log.warning("Price disagreement")
del self.protocol_prices[self.protocol]
self.requestor._price_disagreements.append(self.peer)
return False
@ -389,7 +409,9 @@ class DownloadRequest(RequestHelper):
self.wallet = wallet
def can_make_request(self):
return self.get_blob_details()
if self.protocol in self.protocol_prices:
return self.get_blob_details()
return False
def make_request_and_handle_response(self):
request = self._get_request()

View file

@ -69,6 +69,8 @@ class BlobRequestHandler(object):
if self.PAYMENT_RATE_QUERY in queries:
offered_rate = queries[self.PAYMENT_RATE_QUERY]
offer = Offer(offered_rate)
if offer.rate is None:
log.warning("Empty rate offer")
response.addCallback(lambda r: self._handle_payment_rate_query(offer, r))
if self.BLOB_QUERY in queries:
incoming = queries[self.BLOB_QUERY]

View file

@ -1,18 +1,24 @@
import base64
import datetime
import distutils.version
import logging
import json
import random
import os
import json
import socket
import yaml
import datetime
from lbrynet import settings
from lbrynet.conf import ADJUSTABLE_SETTINGS
from lbrynet.conf import AdjustableSettings
from lbrynet.core.cryptoutils import get_lbry_hash_obj
blobhash_length = get_lbry_hash_obj().digest_size * 2 # digest_size is in bytes, and blob hashes are hex encoded
log = logging.getLogger(__name__)
def generate_id(num=None):
h = get_lbry_hash_obj()
if num is not None:
@ -62,6 +68,8 @@ settings_encoders = {
'.yml': yaml.safe_dump
}
ADJUSTABLE_SETTINGS = AdjustableSettings().get_dict()
def load_settings(path):
ext = os.path.splitext(path)[1]
@ -82,4 +90,17 @@ def save_settings(path):
def today():
return datetime.datetime.today()
def check_connection(server="www.lbry.io", port=80):
"""Attempts to open a socket to server:port and returns True if successful."""
try:
host = socket.gethostbyname(server)
s = socket.create_connection((host, port), 2)
return True
except Exception as ex:
log.info(
"Failed to connect to %s:%s. Maybe the internet connection is not working",
server, port, exc_info=True)
return False

View file

@ -59,9 +59,9 @@ class BlindRepeaterPlugin(Plugin.Plugin):
def get_payment_rate_manager(rates):
data_rate = rates[0][1] if rates[0][0] is True else None
info_rate = rates[1][1] if rates[1][0] is True else None
info_rate = info_rate if info_rate is not None else settings.MIN_VALUABLE_BLOB_INFO_PAYMENT_RATE
info_rate = info_rate if info_rate is not None else settings.min_valuable_info_rate
hash_rate = rates[2][1] if rates[2][0] is True else None
hash_rate = hash_rate if hash_rate is not None else settings.MIN_VALUABLE_BLOB_HASH_PAYMENT_RATE
hash_rate = hash_rate if hash_rate is not None else settings.min_valuable_hash_rate
self.payment_rate_manager = BlindRepeaterPaymentRateManager(default_payment_rate_manager,
info_rate, hash_rate,
blob_data_rate=data_rate)

View file

@ -5,7 +5,6 @@ import os
import platform
import random
import re
import socket
import subprocess
import sys
import base58
@ -55,6 +54,7 @@ from lbrynet.lbrynet_daemon.Lighthouse import LighthouseClient
from lbrynet.lbrynet_daemon.auth.server import AuthJSONRPCServer
# TODO: this code snippet is everywhere. Make it go away
if sys.platform != "darwin":
log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
@ -76,7 +76,7 @@ else:
INITIALIZING_CODE = 'initializing'
LOADING_DB_CODE = 'loading_db'
LOADING_WALLET_CODE = 'loading_wallet'
LOADING_wallet_CODE = 'loading_wallet'
LOADING_FILE_MANAGER_CODE = 'loading_file_manager'
LOADING_SERVER_CODE = 'loading_server'
STARTED_CODE = 'started'
@ -84,7 +84,7 @@ WAITING_FOR_FIRST_RUN_CREDITS = 'waiting_for_credits'
STARTUP_STAGES = [
(INITIALIZING_CODE, 'Initializing...'),
(LOADING_DB_CODE, 'Loading databases...'),
(LOADING_WALLET_CODE, 'Catching up with the blockchain... %s'),
(LOADING_wallet_CODE, 'Catching up with the blockchain... %s'),
(LOADING_FILE_MANAGER_CODE, 'Setting up file manager'),
(LOADING_SERVER_CODE, 'Starting lbrynet'),
(STARTED_CODE, 'Started lbrynet'),
@ -105,11 +105,11 @@ STREAM_STAGES = [
CONNECT_CODE_VERSION_CHECK = 'version_check'
CONNECT_CODE_NETWORK = 'network_connection'
CONNECT_CODE_WALLET = 'wallet_catchup_lag'
CONNECT_CODE_wallet = 'wallet_catchup_lag'
CONNECTION_PROBLEM_CODES = [
(CONNECT_CODE_VERSION_CHECK, "There was a problem checking for updates on github"),
(CONNECT_CODE_NETWORK, "Your internet connection appears to have been interrupted"),
(CONNECT_CODE_WALLET, "Synchronization with the blockchain is lagging... if this continues try restarting LBRY")
(CONNECT_CODE_wallet, "Synchronization with the blockchain is lagging... if this continues try restarting LBRY")
]
BAD_REQUEST = 400
@ -118,7 +118,7 @@ OK_CODE = 200
# TODO alert if your copy of a lbry file is out of date with the name record
REMOTE_SERVER = "www.google.com"
REMOTE_SERVER = "www.lbry.io"
class Parameters(object):
@ -131,8 +131,8 @@ class Daemon(AuthJSONRPCServer):
LBRYnet daemon, a jsonrpc interface to lbry functions
"""
def __init__(self, root, use_authentication=lbrynet_settings.USE_AUTH_HTTP):
AuthJSONRPCServer.__init__(self, use_authentication)
def __init__(self, root):
AuthJSONRPCServer.__init__(self, lbrynet_settings.use_auth_http)
reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
self.allowed_during_startup = ['is_running', 'is_first_run',
@ -141,7 +141,7 @@ class Daemon(AuthJSONRPCServer):
'version', 'get_search_servers']
last_version = {'last_version': {'lbrynet': lbrynet_version, 'lbryum': lbryum_version}}
lbrynet_settings.update(last_version)
self.db_dir = lbrynet_settings.DATA_DIR
self.db_dir = lbrynet_settings.data_dir
self.download_directory = lbrynet_settings.download_directory
self.created_data_dir = False
if not os.path.exists(self.db_dir):
@ -163,7 +163,7 @@ class Daemon(AuthJSONRPCServer):
self.download_timeout = lbrynet_settings.download_timeout
self.max_search_results = lbrynet_settings.max_search_results
self.run_reflector_server = lbrynet_settings.run_reflector_server
self.wallet_type = lbrynet_settings.WALLET
self.wallet_type = lbrynet_settings.wallet
self.delete_blobs_on_remove = lbrynet_settings.delete_blobs_on_remove
self.peer_port = lbrynet_settings.peer_port
self.reflector_port = lbrynet_settings.reflector_port
@ -190,6 +190,19 @@ class Daemon(AuthJSONRPCServer):
self.uploaded_temp_files = []
self._session_id = base58.b58encode(generate_id())
self.lbryid = None
self.daemon_conf = os.path.join(self.db_dir, 'daemon_settings.yml')
if os.path.isfile(self.daemon_conf):
conf_settings = utils.load_settings(self.daemon_conf)
if 'last_version' in conf_settings:
if utils.version_is_greater_than(lbrynet_version, conf_settings['last_version']['lbrynet']):
self.first_run_after_update = True
log.info("First run after update")
log.info("lbrynet %s --> %s", conf_settings['last_version']['lbrynet'], lbrynet_version)
log.info("lbryum %s --> %s", conf_settings['last_version']['lbryum'], lbryum_version)
# utils.save_settings(self.daemon_conf)
self.wallet_user = None
self.wallet_password = None
@ -243,6 +256,13 @@ class Daemon(AuthJSONRPCServer):
self.wallet_dir = os.path.join(os.path.expanduser("~"), ".lbrycrd")
self.lbrycrd_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
self.wallet_conf = os.path.join(self.wallet_dir, "lbrycrd.conf")
if os.name != 'nt':
# TODO: are we still using this?
lbrycrdd_path_conf = os.path.join(os.path.expanduser("~"), ".lbrycrddpath.conf")
if not os.path.isfile(lbrycrdd_path_conf):
f = open(lbrycrdd_path_conf, "w")
f.write(str(self.lbrycrdd_path))
f.close()
def setup(self):
def _log_starting_vals():
@ -358,13 +378,7 @@ class Daemon(AuthJSONRPCServer):
self._events = analytics.Events(context, base58.b58encode(self.lbryid), self._session_id)
def _check_network_connection(self):
try:
host = socket.gethostbyname(REMOTE_SERVER)
s = socket.create_connection((host, 80), 2)
self.connected_to_internet = True
except:
log.info("Internet connection not working")
self.connected_to_internet = False
self.connected_to_internet = utils.check_connection()
def _check_lbrynet_connection(self):
def _log_success():
@ -709,7 +723,7 @@ class Daemon(AuthJSONRPCServer):
return d
def _set_lbryid(self, lbryid):
if lbryid is None:
if lbryid is None or True:
return self._make_lbryid()
else:
log.info("LBRY ID: " + base58.b58encode(lbryid))
@ -762,8 +776,8 @@ class Daemon(AuthJSONRPCServer):
elif self.wallet_type == "lbryum":
log.info("Using lbryum wallet")
config = {'auto-connect': True}
if lbrynet_settings.LBRYUM_WALLET_DIR:
config['lbryum_path'] = lbrynet_settings.LBRYUM_WALLET_DIR
if lbrynet_settings.lbryum_wallet_dir:
config['lbryum_path'] = lbrynet_settings.lbryum_wallet_dir
d = defer.succeed(LBRYumWallet(self.db_dir, config))
elif self.wallet_type == "ptc":
log.info("Using PTC wallet")
@ -1077,7 +1091,7 @@ class Daemon(AuthJSONRPCServer):
log.info("Reflecting stream: %s" % stream_hash)
reflector_server = random.choice(lbrynet_settings.REFLECTOR_SERVERS)
reflector_server = random.choice(lbrynet_settings.reflector_servers)
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
log.info("Start reflector client")
factory = reflector.ClientFactory(
@ -1096,7 +1110,7 @@ class Daemon(AuthJSONRPCServer):
log.info("Reflecting %i blobs" % len(blob_hashes))
reflector_server = random.choice(lbrynet_settings.REFLECTOR_SERVERS)
reflector_server = random.choice(lbrynet_settings.reflector_servers)
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
log.info("Start reflector client")
factory = reflector.BlobClientFactory(
@ -1120,7 +1134,7 @@ class Daemon(AuthJSONRPCServer):
remaining_scripts = [s for s in self.startup_scripts if 'run_once' not in s.keys()]
startup_scripts = self.startup_scripts
self.startup_scripts = lbrynet_settings.startup_scripts = remaining_scripts
conf = os.path.join(lbrynet_settings.DATA_DIR, "daemon_settings.yml")
conf = os.path.join(lbrynet_settings.data_dir, "daemon_settings.yml")
utils.save_settings(conf)
for script in startup_scripts:
@ -1175,7 +1189,7 @@ class Daemon(AuthJSONRPCServer):
r['problem_code'] = self.connection_problem[0]
r['message'] = self.connection_problem[1]
r['is_lagging'] = True
elif self.startup_status[0] == LOADING_WALLET_CODE:
elif self.startup_status[0] == LOADING_wallet_CODE:
if self.wallet_type == 'lbryum':
if self.session.wallet.blocks_behind_alert != 0:
r['message'] = r['message'] % (str(self.session.wallet.blocks_behind_alert) + " blocks behind")
@ -1727,11 +1741,16 @@ class Daemon(AuthJSONRPCServer):
metadata = Metadata(p['metadata'])
make_lbry_file = False
sd_hash = metadata['sources']['lbry_sd_hash']
log.info("Update publish for %s using existing stream", name)
except ValidationError:
make_lbry_file = True
sd_hash = None
metadata = p['metadata']
file_path = p['file_path']
if not file_path:
return defer.fail(Exception("No file given to publish"))
if not os.path.isfile(file_path):
return defer.fail(Exception("Specified file for publish doesnt exist: %s" % file_path))
if not self.pending_claim_checker.running:
self.pending_claim_checker.start(30)

View file

@ -2,6 +2,7 @@ import sys
import json
import argparse
from lbrynet import settings
from lbrynet.lbrynet_daemon.auth.client import LBRYAPIClient
help_msg = "Usage: lbrynet-cli method json-args\n" \
@ -40,9 +41,15 @@ def main():
try:
status = api.daemon_status()
assert status.get('code', False) == "started"
except:
print "lbrynet-daemon isn't running"
sys.exit(1)
except Exception:
try:
settings.update({'use_auth_http': not settings.use_auth_http})
api = LBRYAPIClient.config()
status = api.daemon_status()
assert status.get('code', False) == "started"
except Exception:
print "lbrynet-daemon isn't running"
sys.exit(1)
parser = argparse.ArgumentParser()
parser.add_argument('method', nargs=1)
@ -72,9 +79,9 @@ def main():
if meth in api.help():
try:
if params:
result = LBRYAPIClient.config(service=meth)(params)
result = LBRYAPIClient.config(service=meth, params=params)
else:
result = LBRYAPIClient.config(service=meth)()
result = LBRYAPIClient.config(service=meth, params=params)
print json.dumps(result, sort_keys=True)
except:
print "Something went wrong, here's the usage for %s:" % meth

View file

@ -3,7 +3,6 @@ import logging.handlers
import os
import webbrowser
import sys
import socket
from twisted.web import server, guard
from twisted.internet import defer, reactor
@ -11,14 +10,15 @@ from twisted.cred import portal
from jsonrpc.proxy import JSONRPCProxy
from lbrynet.core import log_support, utils
from lbrynet.lbrynet_daemon.auth.auth import PasswordChecker, HttpPasswordRealm
from lbrynet.lbrynet_daemon.auth.util import initialize_api_key_file
from lbrynet.core import log_support
from lbrynet.core import utils
from lbrynet.lbrynet_daemon.DaemonServer import DaemonServer
from lbrynet.lbrynet_daemon.DaemonRequest import DaemonRequest
from lbrynet import settings
log_dir = settings.DATA_DIR
log_dir = settings.data_dir
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
@ -27,19 +27,12 @@ lbrynet_log = os.path.join(log_dir, settings.LOG_FILE_NAME)
log = logging.getLogger(__name__)
REMOTE_SERVER = "www.google.com"
if getattr(sys, 'frozen', False) and os.name == "nt":
os.environ["REQUESTS_CA_BUNDLE"] = os.path.join(os.path.dirname(sys.executable), "cacert.pem")
def test_internet_connection():
try:
host = socket.gethostbyname(REMOTE_SERVER)
s = socket.create_connection((host, 80), 2)
return True
except:
return False
return utils.check_connection()
def stop():
@ -70,7 +63,7 @@ def start():
parser.add_argument("--branch",
help="Branch of lbry-web-ui repo to use, defaults on master",
default=settings.UI_BRANCH)
default=settings.ui_branch)
parser.add_argument("--http-auth",
dest="useauth",
@ -92,7 +85,7 @@ def start():
action='store_true',
help='enable more debug output for the console')
parser.set_defaults(branch=False, launchui=True, logtoconsole=False, quiet=False, useauth=settings.USE_AUTH_HTTP)
parser.set_defaults(branch=False, launchui=True, logtoconsole=False, quiet=False, useauth=settings.use_auth_http)
args = parser.parse_args()
log_support.configure_file_handler(lbrynet_log)
@ -104,16 +97,17 @@ def start():
log_support.disable_noisy_loggers()
to_pass = {}
settings_path = os.path.join(settings.DATA_DIR, "daemon_settings.yml")
settings_path = os.path.join(settings.data_dir, "daemon_settings.yml")
if os.path.isfile(settings_path):
to_pass.update(utils.load_settings(settings_path))
log.info("Loaded settings file")
if args.ui:
to_pass.update({'local_ui_path': args.ui})
if args.branch:
to_pass.update({'UI_BRANCH': args.branch})
to_pass.update({'USE_AUTH_HTTP': args.useauth})
to_pass.update({'WALLET': args.wallet})
to_pass.update({'ui_branch': args.branch})
to_pass.update({'use_auth_http': args.useauth})
to_pass.update({'wallet': args.wallet})
print to_pass
settings.update(to_pass)
try:
@ -132,20 +126,20 @@ def start():
if not args.logtoconsole and not args.quiet:
print "Starting lbrynet-daemon from command line"
print "To view activity, view the log file here: " + lbrynet_log
print "Web UI is available at http://%s:%i" % (settings.API_INTERFACE, settings.API_PORT)
print "Web UI is available at http://%s:%i" % (settings.API_INTERFACE, settings.api_port)
print "JSONRPC API is available at " + settings.API_CONNECTION_STRING
print "To quit press ctrl-c or call 'stop' via the API"
if test_internet_connection():
lbry = DaemonServer()
d = lbry.start(args.useauth)
d = lbry.start()
if args.launchui:
d.addCallback(lambda _: webbrowser.open(settings.UI_ADDRESS))
if settings.USE_AUTH_HTTP:
if settings.use_auth_http:
log.info("Using authenticated API")
pw_path = os.path.join(settings.DATA_DIR, ".api_keys")
pw_path = os.path.join(settings.data_dir, ".api_keys")
initialize_api_key_file(pw_path)
checker = PasswordChecker.load_file(pw_path)
realm = HttpPasswordRealm(lbry.root)
@ -158,7 +152,7 @@ def start():
lbrynet_server = server.Site(_lbrynet_server)
lbrynet_server.requestFactory = DaemonRequest
reactor.listenTCP(settings.API_PORT, lbrynet_server, interface=settings.API_INTERFACE)
reactor.listenTCP(settings.api_port, lbrynet_server, interface=settings.API_INTERFACE)
reactor.run()
if not args.logtoconsole and not args.quiet:

View file

@ -22,15 +22,15 @@ log = logging.getLogger(__name__)
class DaemonServer(object):
def _setup_server(self, use_authentication):
def _setup_server(self):
self.root = LBRYindex(os.path.join(os.path.join(data_dir, "lbry-ui"), "active"))
self._api = Daemon(self.root, use_authentication=use_authentication)
self._api = Daemon(self.root)
self.root.putChild("view", HostedEncryptedFile(self._api))
self.root.putChild("upload", EncryptedFileUpload(self._api))
self.root.putChild(settings.API_ADDRESS, self._api)
return defer.succeed(True)
def start(self, use_authentication):
d = self._setup_server(use_authentication)
def start(self):
d = self._setup_server()
d.addCallback(lambda _: self._api.setup())
return d

View file

@ -41,7 +41,7 @@ class Publisher(object):
self.lbry_file = None
self.txid = None
self.stream_hash = None
reflector_server = random.choice(settings.REFLECTOR_SERVERS)
reflector_server = random.choice(settings.reflector_servers)
self.reflector_server, self.reflector_port = reflector_server[0], reflector_server[1]
self.metadata = {}
@ -74,7 +74,7 @@ class Publisher(object):
return d
def start_reflector(self):
reflector_server = random.choice(settings.REFLECTOR_SERVERS)
reflector_server = random.choice(settings.reflector_servers)
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
log.info("Reflecting new publication")
factory = reflector.ClientFactory(

View file

@ -76,7 +76,7 @@ class UIManager(object):
def setup(self, branch=None, check_requirements=None, user_specified=None):
local_ui_path = settings.local_ui_path or user_specified
self.branch = settings.UI_BRANCH or branch
self.branch = settings.ui_branch or branch
self.check_requirements = settings.check_ui_requirements or check_requirements
if local_ui_path:

View file

@ -7,6 +7,7 @@ import json
from lbrynet.lbrynet_daemon.auth.util import load_api_keys, APIKey, API_KEY_NAME, get_auth_message
from lbrynet import settings
from jsonrpc.proxy import JSONRPCProxy
log = logging.getLogger(__name__)
USER_AGENT = "AuthServiceProxy/0.1"
@ -21,7 +22,7 @@ class JSONRPCException(Exception):
self.error = rpc_error
class LBRYAPIClient(object):
class AuthAPIClient(object):
def __init__(self, key, timeout, connection, count, service, cookies, auth, url, login_url):
self.__service_name = service
self.__api_key = key
@ -38,7 +39,7 @@ class LBRYAPIClient(object):
raise AttributeError
if self.__service_name is not None:
name = "%s.%s" % (self.__service_name, name)
return LBRYAPIClient(key=self.__api_key,
return AuthAPIClient(key=self.__api_key,
timeout=HTTP_TIMEOUT,
connection=self.__conn,
count=self.__id_count,
@ -101,7 +102,7 @@ class LBRYAPIClient(object):
service=None, cookies=None, auth=None, url=None, login_url=None):
api_key_name = API_KEY_NAME if not key_name else key_name
pw_path = os.path.join(settings.DATA_DIR, ".api_keys") if not pw_path else pw_path
pw_path = os.path.join(settings.data_dir, ".api_keys") if not pw_path else pw_path
if not key:
keys = load_api_keys(pw_path)
api_key = keys.get(api_key_name, False)
@ -111,7 +112,7 @@ class LBRYAPIClient(object):
service_url = "http://%s:%s@%s:%i/%s" % (api_key_name,
api_key.secret,
settings.API_INTERFACE,
settings.API_PORT,
settings.api_port,
settings.API_ADDRESS)
else:
service_url = login_url
@ -152,4 +153,20 @@ class LBRYAPIClient(object):
assert cookies.get(LBRY_SECRET, False), "Missing cookie"
secret = cookies.get(LBRY_SECRET)
api_key = APIKey(secret, api_key_name)
return cls(api_key, timeout, conn, id_count, service, cookies, auth_header, url, service_url)
class LBRYAPIClient(object):
@staticmethod
def config(service=None, params=None):
if settings.use_auth_http:
if service is None:
return AuthAPIClient.config()
log.error("Try auth")
if params is not None:
return AuthAPIClient.config(service=service)(params)
return AuthAPIClient.config(service=service)()
url = settings.API_CONNECTION_STRING
if service is None:
return JSONRPCProxy.from_url(url)
return JSONRPCProxy.from_url(url).call(service, params)

View file

@ -81,7 +81,7 @@ class AuthJSONRPCServer(AuthorizedBase):
NOT_FOUND = 8001
FAILURE = 8002
def __init__(self, use_authentication=settings.USE_AUTH_HTTP):
def __init__(self, use_authentication=settings.use_auth_http):
AuthorizedBase.__init__(self)
self._use_authentication = use_authentication
self.allowed_during_startup = []

View file

@ -46,7 +46,7 @@ def get_body_from_request(path, data):
jsondata = FileBodyProducer(StringIO(json.dumps(data)))
agent = Agent(reactor)
d = agent.request('POST', settings.POINTTRADER_SERVER + path, Headers({'Content-Type': ['application/json']}), jsondata)
d = agent.request('POST', settings.pointtrader_server + path, Headers({'Content-Type': ['application/json']}), jsondata)
d.addCallback(get_body)
return d

View file

@ -3,16 +3,11 @@ import webbrowser
import sys
import os
import logging
import socket
import platform
import shutil
from appdirs import user_data_dir
from PyObjCTools import AppHelper
from twisted.internet import reactor
from twisted.web import server
import Foundation
bundle = Foundation.NSBundle.mainBundle()
lbrycrdd_path = bundle.pathForResource_ofType_('lbrycrdd', None)
@ -30,22 +25,17 @@ if not os.path.isfile(lbrycrdd_path_conf):
from lbrynet.lbrynet_daemon.DaemonServer import DaemonServer
from lbrynet.lbrynet_daemon.DaemonRequest import DaemonRequest
from lbrynet import settings
from lbrynet.core import utils
if platform.mac_ver()[0] >= "10.10":
from LBRYNotify import LBRYNotify
log = logging.getLogger(__name__)
REMOTE_SERVER = "www.google.com"
def test_internet_connection():
try:
host = socket.gethostbyname(REMOTE_SERVER)
s = socket.create_connection((host, 80), 2)
return True
except:
return False
return utils.check_connection()
class LBRYDaemonApp(AppKit.NSApplication):
@ -79,7 +69,7 @@ class LBRYDaemonApp(AppKit.NSApplication):
d.addCallback(lambda _: webbrowser.open(settings.UI_ADDRESS))
lbrynet_server = server.Site(lbry.root)
lbrynet_server.requestFactory = DaemonRequest
reactor.listenTCP(settings.API_PORT, lbrynet_server, interface=settings.API_INTERFACE)
reactor.listenTCP(settings.api_port, lbrynet_server, interface=settings.API_INTERFACE)
def openui_(self, sender):
webbrowser.open(settings.UI_ADDRESS)

View file

@ -15,8 +15,6 @@ if [ ! -f "$LBRYCRDCONF" ]; then
echo -e "rpcuser=lbryrpc\nrpcpassword=$(env LC_CTYPE=C LC_ALL=C tr -dc A-Za-z0-9 < /dev/urandom | head -c 16 | xargs)" > "$LBRYCRDCONF"
fi
WEB_UI_BRANCH='master'
urlencode() {
local LANG=C
local length="${#1}"
@ -42,7 +40,7 @@ DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
if [ -z "$(pgrep lbrynet-daemon)" ]; then
echo "running lbrynet-daemon..."
$DIR/lbrynet-daemon --no-launch --branch="$WEB_UI_BRANCH" &
$DIR/lbrynet-daemon --no-launch &
sleep 3 # let the daemon load before connecting
fi

View file

@ -1,5 +1,5 @@
[Desktop Entry]
Version=0.6.7
Version=0.6.9
Name=LBRY
Comment=The world's first user-owned content marketplace
Icon=lbry

View file

@ -18,6 +18,7 @@ try:
except ImportError:
import win32gui
from lbrynet.core import utils
from lbrynet.lbrynet_daemon.DaemonServer import DaemonServer
from lbrynet.lbrynet_daemon.DaemonRequest import DaemonRequest
from lbrynet import settings
@ -35,16 +36,9 @@ log = logging.getLogger(__name__)
if getattr(sys, 'frozen', False) and os.name == "nt":
os.environ["REQUESTS_CA_BUNDLE"] = os.path.join(os.path.dirname(sys.executable), "cacert.pem")
REMOTE_SERVER = "www.google.com"
def test_internet_connection():
try:
host = socket.gethostbyname(REMOTE_SERVER)
s = socket.create_connection((host, 80), 2)
return True
except:
return False
return utils.check_connection()
def non_string_iterable(obj):
@ -290,7 +284,7 @@ def main(lbry_name=None):
lbrynet_server = server.Site(lbry.root)
lbrynet_server.requestFactory = DaemonRequest
try:
reactor.listenTCP(settings.API_PORT, lbrynet_server, interface=settings.API_INTERFACE)
reactor.listenTCP(settings.api_port, lbrynet_server, interface=settings.API_INTERFACE)
except error.CannotListenError:
log.info('Daemon already running, exiting app')
sys.exit(1)
@ -313,4 +307,4 @@ if __name__ == '__main__':
if start_daemon:
main(lbry_name)
else:
LBRYURIHandler.open_address(lbry_name)

View file

@ -1,4 +1,4 @@
C:\Python27\Scripts\pip.exe install mock
C:\Python27\Scripts\pip.exe install pylint
C:\Python27\python.exe C:\Python27\Scripts\trial.py C:\projects\lbry\tests\unit
if ($LastExitCode -ne 0) { $host.SetShouldExit($LastExitCode) }

View file

@ -29,4 +29,5 @@ wsgiref==0.1.2
zope.interface==4.1.3
base58==0.2.2
googlefinance==0.7
pyyaml==3.12
service_identity==16.0.0

View file

@ -14,7 +14,8 @@ from lbrynet import settings
from lbrynet.lbrylive.LiveStreamCreator import FileLiveStreamCreator
from lbrynet.lbrylive.LiveStreamMetadataManager import DBLiveStreamMetadataManager
from lbrynet.lbrylive.LiveStreamMetadataManager import TempLiveStreamMetadataManager
from lbrynet.lbryfile.EncryptedFileMetadataManager import TempEncryptedFileMetadataManager, DBEncryptedFileMetadataManager
from lbrynet.lbryfile.EncryptedFileMetadataManager import TempEncryptedFileMetadataManager, \
DBEncryptedFileMetadataManager
from lbrynet.lbryfilemanager.EncryptedFileManager import EncryptedFileManager
from lbrynet.core.PTCWallet import PointTraderKeyQueryHandlerFactory, PointTraderKeyExchanger
from lbrynet.core.Session import Session
@ -39,7 +40,6 @@ from lbrynet.lbrylive.server.LiveBlobInfoQueryHandler import CryptBlobInfoQueryH
from lbrynet.lbrylive.client.LiveStreamOptions import add_live_stream_to_sd_identifier
from lbrynet.lbrylive.client.LiveStreamDownloader import add_full_live_stream_downloader_to_sd_identifier
log_format = "%(funcName)s(): %(message)s"
logging.basicConfig(level=logging.WARNING, format=log_format)
@ -47,6 +47,7 @@ logging.basicConfig(level=logging.WARNING, format=log_format)
def require_system(system):
def wrapper(fn):
return fn
if platform.system() == system:
return wrapper
else:
@ -125,7 +126,6 @@ class FakePeerFinder(object):
class FakeAnnouncer(object):
def __init__(self, *args):
pass
@ -175,7 +175,7 @@ class GenFile(io.RawIOBase):
def readall(self):
return self.read()
def _generate_chunk(self, n=2**10):
def _generate_chunk(self, n=2 ** 10):
output = self.pattern[self.last_offset:self.last_offset + n]
n_left = n - len(output)
whole_patterns = n_left / len(self.pattern)
@ -190,15 +190,15 @@ test_create_stream_sd_file = {
'blobs': [
{'length': 2097152, 'blob_num': 0,
'blob_hash':
'dc4708f76a5e7af0f1cae0ee96b824e2ed9250c9346c093b441f0a20d3607c17948b6fcfb4bc62020fe5286693d08586',
'iv': '30303030303030303030303030303031'},
{'length': 2097152, 'blob_num': 1,
'blob_hash':
'f4067522c1b49432a2a679512e3917144317caa1abba0c041e0cd2cf9f635d4cf127ce1824fa04189b63916174951f70',
'iv': '30303030303030303030303030303032'},
{'length': 1015056, 'blob_num': 2,
'blob_hash':
'305486c434260484fcb2968ce0e963b72f81ba56c11b08b1af0789b55b44d78422600f9a38e3cf4f2e9569897e5646a9',
'iv': '30303030303030303030303030303033'},
{'length': 0, 'blob_num': 3, 'iv': '30303030303030303030303030303034'}],
'stream_type': 'lbryfile',
@ -207,7 +207,7 @@ test_create_stream_sd_file = {
'stream_hash': '6d27fbe10c86d81aacfb897c7a426d0a2214f5a299455a6d315c0f998c4b3545c2dc60906122d94653c23b1898229e3f'}
def start_lbry_uploader(sd_hash_queue, kill_event, dead_event, file_size, ul_rate_limit=None):
def start_lbry_uploader(sd_hash_queue, kill_event, dead_event, file_size, ul_rate_limit=None, is_generous=False):
if sys.platform.startswith("linux"):
sys.modules = sys.modules.copy()
del sys.modules['twisted.internet.reactor']
@ -231,14 +231,14 @@ def start_lbry_uploader(sd_hash_queue, kill_event, dead_event, file_size, ul_rat
rate_limiter = RateLimiter()
sd_identifier = StreamDescriptorIdentifier()
db_dir = "server"
os.mkdir(db_dir)
session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker,
dht_node_class=Node)
peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker,
dht_node_class=Node, is_generous=settings.is_generous_host)
stream_info_manager = TempEncryptedFileMetadataManager()
@ -319,8 +319,7 @@ def start_lbry_uploader(sd_hash_queue, kill_event, dead_event, file_size, ul_rat
reactor.run()
def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n, ul_rate_limit=None):
def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n, ul_rate_limit=None, is_generous=False):
if sys.platform.startswith("linux"):
sys.modules = sys.modules.copy()
del sys.modules['twisted.internet.reactor']
@ -351,9 +350,10 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n, ul_ra
os.mkdir(blob_dir)
session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd" + str(n),
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=None, peer_port=peer_port,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker)
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=None, peer_port=peer_port,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host)
stream_info_manager = TempEncryptedFileMetadataManager()
@ -433,7 +433,6 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event, ready_event, n, ul_ra
def start_live_server(sd_hash_queue, kill_event, dead_event):
if sys.platform.startswith("linux"):
sys.modules = sys.modules.copy()
del sys.modules['twisted.internet.reactor']
@ -457,14 +456,13 @@ def start_live_server(sd_hash_queue, kill_event, dead_event):
rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier()
db_dir = "server"
os.mkdir(db_dir)
session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker)
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host)
stream_info_manager = DBLiveStreamMetadataManager(session.db_dir, hash_announcer)
logging.debug("Created the session")
@ -564,8 +562,7 @@ def start_live_server(sd_hash_queue, kill_event, dead_event):
reactor.run()
def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow):
def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_generous=False):
if sys.platform.startswith("linux"):
sys.modules = sys.modules.copy()
del sys.modules['twisted.internet.reactor']
@ -596,12 +593,13 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow):
os.mkdir(blob_dir)
session = Session(settings.data_rate, db_dir=db_dir, lbryid="efgh",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=peer_port,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker)
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=peer_port,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host)
if slow is True:
session.rate_limiter.set_ul_limit(2**11)
session.rate_limiter.set_ul_limit(2 ** 11)
def start_all():
d = session.setup()
@ -652,7 +650,7 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow):
def create_single_blob():
blob_creator = session.blob_manager.get_blob_creator()
blob_creator.write("0" * 2**21)
blob_creator.write("0" * 2 ** 21)
return blob_creator.close()
def put_blob_hash_on_queue(blob_hash):
@ -671,6 +669,7 @@ class TestTransfer(TestCase):
self.session = None
self.stream_info_manager = None
self.lbry_file_manager = None
self.is_generous = True
self.addCleanup(self.take_down_env)
def take_down_env(self):
@ -761,17 +760,17 @@ class TestTransfer(TestCase):
rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier()
db_dir = "client"
blob_dir = os.path.join(db_dir, "blobfiles")
os.mkdir(db_dir)
os.mkdir(blob_dir)
self.session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker,
dht_node_class=Node)
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker,
dht_node_class=Node, is_generous=settings.is_generous_host)
self.stream_info_manager = TempEncryptedFileMetadataManager()
@ -854,9 +853,10 @@ class TestTransfer(TestCase):
os.mkdir(db_dir)
self.session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None,
peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, dht_node_class=Node)
peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=None,
peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, dht_node_class=Node,
is_generous=settings.is_generous_host)
self.stream_info_manager = TempLiveStreamMetadataManager(hash_announcer)
@ -948,16 +948,16 @@ class TestTransfer(TestCase):
hash_announcer = FakeAnnouncer()
rate_limiter = DummyRateLimiter()
db_dir = "client"
blob_dir = os.path.join(db_dir, "blobfiles")
os.mkdir(db_dir)
os.mkdir(blob_dir)
self.session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker)
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host)
d1 = self.wait_for_hash_from_queue(blob_hash_queue_1)
d2 = self.wait_for_hash_from_queue(blob_hash_queue_2)
@ -1026,7 +1026,6 @@ class TestTransfer(TestCase):
rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier()
downloaders = []
db_dir = "client"
@ -1035,9 +1034,10 @@ class TestTransfer(TestCase):
os.mkdir(blob_dir)
self.session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553, use_upnp=False,
rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker)
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553, use_upnp=False,
rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host)
self.stream_info_manager = DBEncryptedFileMetadataManager(self.session.db_dir)
self.lbry_file_manager = EncryptedFileManager(self.session, self.stream_info_manager, sd_identifier)
@ -1072,7 +1072,8 @@ class TestTransfer(TestCase):
logging.debug("deleting the file...")
d = self.lbry_file_manager.delete_lbry_file(downloaders[0])
d.addCallback(lambda _: self.lbry_file_manager.get_count_for_stream_hash(downloaders[0].stream_hash))
d.addCallback(lambda c: self.stream_info_manager.delete_stream(downloaders[1].stream_hash) if c == 0 else True)
d.addCallback(
lambda c: self.stream_info_manager.delete_stream(downloaders[1].stream_hash) if c == 0 else True)
return d
def check_lbry_file():
@ -1130,7 +1131,7 @@ class TestTransfer(TestCase):
dead_events = [Event() for _ in range(num_uploaders)]
ready_events = [Event() for _ in range(1, num_uploaders)]
uploader = Process(target=start_lbry_uploader, args=(sd_hash_queue, kill_event, dead_events[0],
9373419, 2**22))
9373419, 2 ** 22))
uploader.start()
self.server_processes.append(uploader)
@ -1143,16 +1144,16 @@ class TestTransfer(TestCase):
rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier()
db_dir = "client"
blob_dir = os.path.join(db_dir, "blobfiles")
os.mkdir(db_dir)
os.mkdir(blob_dir)
self.session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=None, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker)
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=None, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host)
self.stream_info_manager = TempEncryptedFileMetadataManager()
@ -1161,7 +1162,7 @@ class TestTransfer(TestCase):
def start_additional_uploaders(sd_hash):
for i in range(1, num_uploaders):
uploader = Process(target=start_lbry_reuploader,
args=(sd_hash, kill_event, dead_events[i], ready_events[i-1], i, 2**10))
args=(sd_hash, kill_event, dead_events[i], ready_events[i - 1], i, 2 ** 10))
uploader.start()
self.server_processes.append(uploader)
return defer.succeed(True)
@ -1228,12 +1229,12 @@ class TestTransfer(TestCase):
class TestStreamify(TestCase):
def setUp(self):
self.session = None
self.stream_info_manager = None
self.lbry_file_manager = None
self.addCleanup(self.take_down_env)
self.is_generous = True
def take_down_env(self):
@ -1254,7 +1255,6 @@ class TestStreamify(TestCase):
return d
def test_create_stream(self):
wallet = FakeWallet()
peer_manager = PeerManager()
peer_finder = FakePeerFinder(5553, peer_manager, 2)
@ -1262,16 +1262,16 @@ class TestStreamify(TestCase):
rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier()
db_dir = "client"
blob_dir = os.path.join(db_dir, "blobfiles")
os.mkdir(db_dir)
os.mkdir(blob_dir)
self.session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker)
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host)
self.stream_info_manager = TempEncryptedFileMetadataManager()
@ -1315,16 +1315,16 @@ class TestStreamify(TestCase):
rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier()
db_dir = "client"
blob_dir = os.path.join(db_dir, "blobfiles")
os.mkdir(db_dir)
os.mkdir(blob_dir)
self.session = Session(settings.data_rate, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker)
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=settings.is_generous_host)
self.stream_info_manager = DBEncryptedFileMetadataManager(self.session.db_dir)
@ -1336,7 +1336,6 @@ class TestStreamify(TestCase):
return d
def combine_stream(stream_hash):
prm = self.session.payment_rate_manager
d = self.lbry_file_manager.add_lbry_file(stream_hash, prm)
d.addCallback(start_lbry_file)

View file

@ -157,7 +157,6 @@ class DummyBlobAvailabilityTracker(BlobAvailabilityTracker):
'f99d24cd50d4bfd77c2598bfbeeb8415bf0feef21200bdf0b8fbbde7751a77b7a2c68e09c25465a2f40fba8eecb0b4e0': ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
'c84aa1fd8f5009f7c4e71e444e40d95610abc1480834f835eefb267287aeb10025880a3ce22580db8c6d92efb5bc0c9c': ['1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4', '1.2.3.4'],
}
self.last_mean_availability = Decimal(0.0)
self._blob_manager = None
self._peer_finder = PeerFinder(11223, 11224, 2)
self._dht_node = None