Merge branch 'master' into content_type

fix conflicts:
	lbrynet/lbrynet_daemon/LBRYPublisher.py
Jack 2016-08-25 17:59:20 -04:00
commit 80837cb040
45 changed files with 1623 additions and 96 deletions

View file

@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.3.19
current_version = 0.3.21
commit = True
tag = True
message = Bump version: {current_version} -> {new_version}

appveyor.yml Normal file (59 lines added)
View file

@ -0,0 +1,59 @@
version: 1.0.{build}
init:
- ps: $env:Path += ";C:\MinGW\bin\"
- ps: gcc --version
- ps: mingw32-make --version
- ps: mkdir C:\temp
- ps: Invoke-WebRequest "https://pypi.python.org/packages/55/90/e987e28ed29b571f315afea7d317b6bf4a551e37386b344190cffec60e72/miniupnpc-1.9.tar.gz" -OutFile "C:\temp\miniupnpc-1.9.tar.gz"
- ps: cd C:\temp
- ps: 7z e miniupnpc-1.9.tar.gz
- ps: 7z x miniupnpc-1.9.tar
- ps: cd C:\temp\miniupnpc-1.9
- ps: |
mingw32-make.exe -f Makefile.mingw
C:\Python27\python.exe C:\temp\miniupnpc-1.9\setupmingw32.py build --compiler=mingw32
C:\Python27\python.exe C:\temp\miniupnpc-1.9\setupmingw32.py install
- ps: Invoke-WebRequest "https://github.com/lbryio/lbry/raw/master/packaging/windows/libs/gmpy-1.17-cp27-none-win32.whl" -OutFile "C:\temp\gmpy-1.17-cp27-none-win32.whl"
- ps: C:\Python27\Scripts\pip.exe install "C:\temp\gmpy-1.17-cp27-none-win32.whl"
- ps: C:\Python27\Scripts\pip.exe install pypiwin32==219
- ps: C:\Python27\Scripts\pip.exe install six==1.9.0
- ps: C:\Python27\Scripts\pip.exe install requests==2.9.1
- ps: C:\Python27\Scripts\pip.exe install zope.interface==4.1.3
- ps: C:\Python27\Scripts\pip.exe install cx-freeze==4.3.3
- ps: C:\Python27\Scripts\pip.exe install cython==0.24.1
- ps: C:\Python27\Scripts\pip.exe install Twisted==16.0.0
- ps: C:\Python27\Scripts\pip.exe install Yapsy==1.11.223
- ps: C:\Python27\Scripts\pip.exe install appdirs==1.4.0
- ps: C:\Python27\Scripts\pip.exe install argparse==1.2.1
- ps: C:\Python27\Scripts\pip.exe install colorama==0.3.7
- ps: C:\Python27\Scripts\pip.exe install dnspython==1.12.0
- ps: C:\Python27\Scripts\pip.exe install ecdsa==0.13
- ps: C:\Python27\Scripts\pip.exe install jsonrpc==1.2
- ps: C:\Python27\Scripts\pip.exe install jsonrpclib==0.1.7
- ps: C:\Python27\Scripts\pip.exe install loggly-python-handler==1.0.0
- ps: C:\Python27\Scripts\pip.exe install pbkdf2==1.3
- ps: C:\Python27\Scripts\pip.exe install protobuf==3.0.0
- ps: C:\Python27\Scripts\pip.exe install pycrypto==2.6.1
- ps: C:\Python27\Scripts\pip.exe install python-bitcoinrpc==0.1
- ps: C:\Python27\Scripts\pip.exe install qrcode==5.2.2
- ps: C:\Python27\Scripts\pip.exe install requests_futures==0.9.7
- ps: C:\Python27\Scripts\pip.exe install seccure==0.3.1.3
- ps: C:\Python27\Scripts\pip.exe install simplejson==3.8.2
- ps: C:\Python27\Scripts\pip.exe install slowaes==0.1a1
- ps: C:\Python27\Scripts\pip.exe install txJSON-RPC==0.3.1
- ps: C:\Python27\Scripts\pip.exe install unqlite==0.5.3
- ps: C:\Python27\Scripts\pip.exe install wsgiref==0.1.2
- ps: C:\Python27\Scripts\pip.exe install base58==0.2.2
- ps: C:\Python27\Scripts\pip.exe install googlefinance==0.7
- ps: C:\Python27\Scripts\pip.exe install git+https://github.com/lbryio/lbryum.git
- ps: cd C:\projects\lbry
build_script:
- cmd: C:\Python27\python.exe setup_win32.py build bdist_msi
artifacts:
- path: dist/*.msi
name: msi
- path: build/exe.win32-2.7/
name: lbry-portable

View file

@ -1,2 +1,2 @@
__version__ = "0.3.19"
__version__ = "0.3.21"
version = tuple(__version__.split('.'))

View file

@ -1,6 +1,7 @@
"""
Some network wide and also application specific parameters
"""
import os
MAX_HANDSHAKE_SIZE = 2**16
@ -28,6 +29,8 @@ SEARCH_SERVERS = ["http://lighthouse1.lbry.io:50005",
"http://lighthouse2.lbry.io:50005",
"http://lighthouse3.lbry.io:50005"]
REFLECTOR_SERVERS = [("reflector.lbry.io", 5566)]
LOG_FILE_NAME = "lbrynet.log"
LOG_POST_URL = "https://lbry.io/log-upload"
@ -36,7 +39,10 @@ CRYPTSD_FILE_EXTENSION = ".cryptsd"
API_INTERFACE = "localhost"
API_ADDRESS = "lbryapi"
API_PORT = 5279
ICON_PATH = "app.icns"
if os.name == "nt":
ICON_PATH = "icons"
else:
ICON_PATH = "app.icns"
APP_NAME = "LBRY"
API_CONNECTION_STRING = "http://%s:%i/%s" % (API_INTERFACE, API_PORT, API_ADDRESS)
UI_ADDRESS = "http://%s:%i" % (API_INTERFACE, API_PORT)

View file

@ -289,7 +289,7 @@ class LBRYWallet(object):
d = self._do_send_many(payments_to_send)
d.addCallback(lambda txid: log.debug("Sent transaction %s", txid))
return d
log.info("There were no payments to send")
log.debug("There were no payments to send")
return defer.succeed(True)
def get_stream_info_for_name(self, name):
@ -489,6 +489,33 @@ class LBRYWallet(object):
d.addCallback(self._get_decoded_tx)
return d
def get_history(self):
d = self._get_history()
return d
def get_tx_json(self, txid):
def _decode(raw_tx):
tx = Transaction(raw_tx).deserialize()
decoded_tx = {}
for txkey in tx.keys():
if isinstance(tx[txkey], list):
decoded_tx[txkey] = []
for i in tx[txkey]:
tmp = {}
for k in i.keys():
if isinstance(i[k], Decimal):
tmp[k] = float(i[k] / 1e8)
else:
tmp[k] = i[k]
decoded_tx[txkey].append(tmp)
else:
decoded_tx[txkey] = tx[txkey]
return decoded_tx
d = self._get_raw_tx(txid)
d.addCallback(_decode)
return d
def get_name_and_validity_for_sd_hash(self, sd_hash):
d = self._get_claim_metadata_for_sd_hash(sd_hash)
d.addCallback(lambda name_txid: self._get_status_of_claim(name_txid[1], name_txid[0], sd_hash) if name_txid is not None else None)
@ -688,6 +715,9 @@ class LBRYWallet(object):
def _get_balance_for_address(self, address):
return defer.fail(NotImplementedError())
def _get_history(self):
return defer.fail(NotImplementedError())
def _start(self):
pass
@ -823,6 +853,9 @@ class LBRYcrdWallet(LBRYWallet):
def _get_value_for_name(self, name):
return threads.deferToThread(self._get_value_for_name_rpc, name)
def _get_history(self):
return threads.deferToThread(self._list_transactions_rpc)
def _get_rpc_conn(self):
return AuthServiceProxy(self.rpc_conn_string)
@ -979,7 +1012,7 @@ class LBRYcrdWallet(LBRYWallet):
@_catch_connection_error
def _update_name_rpc(self, txid, value, amount):
rpc_conn = self._get_rpc_conn()
return rpc_conn.updateclaim(txid, value, amount)
return rpc_conn.updateclaim(txid, json.dumps(value), amount)
@_catch_connection_error
def _send_name_claim_rpc(self, name, value, amount):
@ -1007,6 +1040,11 @@ class LBRYcrdWallet(LBRYWallet):
rpc_conn = self._get_rpc_conn()
return rpc_conn.getbestblockhash()
@_catch_connection_error
def _list_transactions_rpc(self):
rpc_conn = self._get_rpc_conn()
return rpc_conn.listtransactions()
@_catch_connection_error
def _stop_rpc(self):
# check if our lbrycrdd is actually running, or if we connected to one that was already
@ -1294,34 +1332,11 @@ class LBRYumWallet(LBRYWallet):
func = getattr(self.cmd_runner, cmd.name)
return threads.deferToThread(func)
def get_history(self):
def _get_history(self):
cmd = known_commands['history']
func = getattr(self.cmd_runner, cmd.name)
return threads.deferToThread(func)
def get_tx_json(self, txid):
def _decode(raw_tx):
tx = Transaction(raw_tx).deserialize()
decoded_tx = {}
for txkey in tx.keys():
if isinstance(tx[txkey], list):
decoded_tx[txkey] = []
for i in tx[txkey]:
tmp = {}
for k in i.keys():
if isinstance(i[k], Decimal):
tmp[k] = float(i[k] / 1e8)
else:
tmp[k] = i[k]
decoded_tx[txkey].append(tmp)
else:
decoded_tx[txkey] = tx[txkey]
return decoded_tx
d = self._get_raw_tx(txid)
d.addCallback(_decode)
return d
def get_pub_keys(self, wallet):
cmd = known_commands['getpubkeys']
func = getattr(self.cmd_runner, cmd.name)

View file

@ -73,7 +73,7 @@ class ClientProtocol(Protocol):
return defer.fail(failure.Failure(ValueError("There is already a request for that response active")))
self._next_request.update(request.request_dict)
d = defer.Deferred()
log.debug("Adding a request. Request: %s", str(request))
log.debug("Adding a request. Request: %s", str(request.request_dict))
self._response_deferreds[request.response_identifier] = d
return d

View file

@ -172,6 +172,7 @@ class ConnectionManager(object):
def pick_best_peer(peers):
# TODO: Eventually rank them based on past performance/reputation. For now
# TODO: just pick the first to which we don't have an open connection
log.debug("Got a list of peers to choose from: %s", str(peers))
if peers is None:
return None

View file

@ -76,7 +76,16 @@ def disable_third_party_loggers():
def disable_noisy_loggers():
logging.getLogger('BitcoinRPC').setLevel(logging.INFO)
logging.getLogger('lbrynet.analytics.api').setLevel(logging.INFO)
logging.getLogger('lbrynet.core.client.ConnectionManager').setLevel(logging.INFO)
logging.getLogger('lbrynet.core.client.BlobRequester').setLevel(logging.INFO)
logging.getLogger('lbrynet.core.client.ClientProtocol').setLevel(logging.INFO)
logging.getLogger('lbrynet.core.server.ServerRequestHandler').setLevel(logging.INFO)
logging.getLogger('lbrynet.core.server.ServerProtocol').setLevel(logging.INFO)
logging.getLogger('lbrynet.core.server.BlobAvailabilityHandler').setLevel(logging.INFO)
logging.getLogger('lbrynet.dht').setLevel(logging.INFO)
logging.getLogger('lbrynet.lbrynet_daemon.LBRYExchangeRateManager').setLevel(logging.INFO)
@_log_decorator

View file

@ -44,6 +44,7 @@ class BlobAvailabilityHandler(object):
d = self._get_available_blobs(queries[self.query_identifiers[0]])
def set_field(available_blobs):
log.debug("available blobs: %s", str(available_blobs))
return {'available_blobs': available_blobs}
d.addCallback(set_field)

View file

@ -1,9 +1,12 @@
import logging
import os
def migrate_db(db_dir, start, end):
current = start
old_dirs = []
if os.name == "nt":
return old_dirs
while current < end:
if current == 0:
from lbrynet.db_migrator.migrate0to1 import do_migration

View file

@ -0,0 +1,2 @@
from lbrynet.lbryfile.StreamDescriptor import get_sd_info
from lbrynet.lbryfile.StreamDescriptor import publish_sd_blob

View file

@ -48,7 +48,7 @@ from lbrynet.lbrynet_console.LBRYSettings import LBRYSettings
from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, DEFAULT_MAX_SEARCH_RESULTS, \
KNOWN_DHT_NODES, DEFAULT_MAX_KEY_FEE, DEFAULT_WALLET, \
DEFAULT_SEARCH_TIMEOUT, DEFAULT_CACHE_TIME, DEFAULT_UI_BRANCH, \
LOG_POST_URL, LOG_FILE_NAME
LOG_POST_URL, LOG_FILE_NAME, REFLECTOR_SERVERS
from lbrynet.conf import DEFAULT_SD_DOWNLOAD_TIMEOUT
from lbrynet.conf import DEFAULT_TIMEOUT
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob, BlobStreamDescriptorReader
@ -57,7 +57,7 @@ from lbrynet.core.PTCWallet import PTCWallet
from lbrynet.core.LBRYWallet import LBRYcrdWallet, LBRYumWallet
from lbrynet.lbryfilemanager.LBRYFileManager import LBRYFileManager
from lbrynet.lbryfile.LBRYFileMetadataManager import DBLBRYFileMetadataManager, TempLBRYFileMetadataManager
# from lbryum import LOG_PATH as lbryum_log
from lbrynet import reflector
# TODO: this code snippet is everywhere. Make it go away
@ -180,6 +180,11 @@ class LBRYDaemon(jsonrpc.JSONRPC):
from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
default_download_directory = get_path(FOLDERID.Downloads, UserHandle.current)
self.db_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbrynet")
try:
os.makedirs(self.db_dir)
except OSError:
if not os.path.isdir(self.db_dir):
raise
elif sys.platform == "darwin":
default_download_directory = os.path.join(os.path.expanduser("~"), 'Downloads')
self.db_dir = user_data_dir("LBRY")
@ -210,9 +215,11 @@ class LBRYDaemon(jsonrpc.JSONRPC):
'delete_blobs_on_remove': True,
'peer_port': 3333,
'dht_node_port': 4444,
'reflector_port': 5566,
'use_upnp': True,
'start_lbrycrdd': True,
'requested_first_run_credits': False,
'run_reflector_server': False,
'cache_time': DEFAULT_CACHE_TIME,
'startup_scripts': [],
'last_version': {'lbrynet': lbrynet_version, 'lbryum': lbryum_version}
@ -272,6 +279,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
self.search_timeout = self.session_settings['search_timeout']
self.download_timeout = self.session_settings['download_timeout']
self.max_search_results = self.session_settings['max_search_results']
self.run_reflector_server = self.session_settings['run_reflector_server']
####
#
# Ignore the saved wallet type. Some users will have their wallet type
@ -298,6 +306,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
####
self.delete_blobs_on_remove = self.session_settings['delete_blobs_on_remove']
self.peer_port = self.session_settings['peer_port']
self.reflector_port = self.session_settings['reflector_port']
self.dht_node_port = self.session_settings['dht_node_port']
self.use_upnp = self.session_settings['use_upnp']
self.start_lbrycrdd = self.session_settings['start_lbrycrdd']
@ -682,10 +691,10 @@ class LBRYDaemon(jsonrpc.JSONRPC):
def _start_server(self):
if self.peer_port is not None:
server_factory = ServerProtocolFactory(self.session.rate_limiter,
self.query_handlers,
self.session.peer_manager)
try:
self.lbry_server_port = reactor.listenTCP(self.peer_port, server_factory)
except error.CannotListenError as e:
@ -694,6 +703,33 @@ class LBRYDaemon(jsonrpc.JSONRPC):
raise ValueError("%s lbrynet may already be running on your computer.", str(e))
return defer.succeed(True)
def _start_reflector(self):
if self.run_reflector_server:
log.info("Starting reflector server")
if self.reflector_port is not None:
reflector_factory = reflector.ServerFactory(
self.session.peer_manager,
self.session.blob_manager
)
try:
self.reflector_server_port = reactor.listenTCP(self.reflector_port, reflector_factory)
log.info('Started reflector on port %s', self.reflector_port)
except error.CannotListenError as e:
log.exception("Couldn't bind reflector to port %d", self.reflector_port)
raise ValueError("{} lbrynet may already be running on your computer.".format(e))
return defer.succeed(True)
def _stop_reflector(self):
if self.run_reflector_server:
log.info("Stopping reflector server")
try:
if self.reflector_server_port is not None:
self.reflector_server_port, p = None, self.reflector_server_port
return defer.maybeDeferred(p.stopListening)
except AttributeError:
return defer.succeed(True)
return defer.succeed(True)
def _stop_server(self):
try:
if self.lbry_server_port is not None:
@ -707,7 +743,8 @@ class LBRYDaemon(jsonrpc.JSONRPC):
def _setup_server(self):
def restore_running_status(running):
if running is True:
return self._start_server()
d = self._start_server()
d.addCallback(lambda _: self._start_reflector())
return defer.succeed(True)
self.startup_status = STARTUP_STAGES[4]
@ -807,6 +844,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
d = self._upload_log(log_type="close", exclude_previous=False if self.first_run else True)
d.addCallback(lambda _: self._stop_server())
d.addCallback(lambda _: self._stop_reflector())
d.addErrback(lambda err: True)
d.addCallback(lambda _: self.lbry_file_manager.stop())
d.addErrback(lambda err: True)
@ -1306,6 +1344,30 @@ class LBRYDaemon(jsonrpc.JSONRPC):
d = defer.DeferredList([self._get_lbry_file('sd_hash', l.sd_hash) for l in self.lbry_file_manager.lbry_files])
return d
def _reflect(self, lbry_file):
if not lbry_file:
return defer.fail(Exception("no lbry file given to reflect"))
stream_hash = lbry_file.stream_hash
if stream_hash is None:
return defer.fail(Exception("no stream hash"))
log.info("Reflecting stream: %s" % stream_hash)
reflector_server = random.choice(REFLECTOR_SERVERS)
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
log.info("Start reflector client")
factory = reflector.ClientFactory(
self.session.blob_manager,
self.lbry_file_manager.stream_info_manager,
stream_hash
)
d = reactor.resolve(reflector_address)
d.addCallback(lambda ip: reactor.connectTCP(ip, reflector_port, factory))
d.addCallback(lambda _: factory.finished_deferred)
return d
def _log_to_slack(self, msg):
URL = "https://hooks.slack.com/services/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"
msg = platform.platform() + ": " + base58.b58encode(self.lbryid)[:20] + ", " + msg
@ -1880,6 +1942,12 @@ class LBRYDaemon(jsonrpc.JSONRPC):
m['fee'][currency]['address'] = address
return m
def _reflect_if_possible(sd_hash, txid):
d = self._get_lbry_file('sd_hash', sd_hash, return_json=False)
d.addCallback(self._reflect)
d.addCallback(lambda _: txid)
return d
name = p['name']
log.info("Publish: ")
@ -1896,8 +1964,10 @@ class LBRYDaemon(jsonrpc.JSONRPC):
try:
metadata = Metadata(p['metadata'])
make_lbry_file = False
sd_hash = metadata['sources']['lbry_sd_hash']
except AssertionError:
make_lbry_file = True
sd_hash = None
metadata = p['metadata']
file_path = p['file_path']
@ -1921,6 +1991,9 @@ class LBRYDaemon(jsonrpc.JSONRPC):
d.addCallback(lambda meta: pub.start(name, file_path, bid, meta))
else:
d.addCallback(lambda meta: self.session.wallet.claim_name(name, bid, meta))
if sd_hash:
d.addCallback(lambda txid: _reflect_if_possible(sd_hash, txid))
d.addCallback(lambda txid: self._add_to_pending_claims(name, txid))
d.addCallback(lambda r: self._render_response(r, OK_CODE))
@ -2350,6 +2423,45 @@ class LBRYDaemon(jsonrpc.JSONRPC):
d.addCallback(lambda r: self._render_response(r, OK_CODE))
return d
def jsonrpc_announce_all_blobs_to_dht(self):
"""
Announce all blobs to the dht
Args:
None
Returns:
"Announced" message on success
"""
d = self.session.blob_manager.immediate_announce_all_blobs()
d.addCallback(lambda _: self._render_response("Announced", OK_CODE))
return d
def jsonrpc_reflect(self, p):
"""
Reflect a stream
Args:
sd_hash
Returns:
True or traceback
"""
sd_hash = p['sd_hash']
d = self._get_lbry_file('sd_hash', sd_hash, return_json=False)
d.addCallback(self._reflect)
d.addCallbacks(lambda _: self._render_response(True, OK_CODE), lambda err: self._render_response(err.getTraceback(), OK_CODE))
return d
def jsonrpc_get_blobs(self):
"""
return all blobs
"""
d = defer.succeed(self.session.blob_manager.blobs)
d.addCallback(lambda r: self._render_response(r, OK_CODE))
return d
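
The three methods above are plain jsonrpc_* handlers, so they are reachable through the daemon's JSON-RPC API like every other command. Below is a minimal sketch of invoking the new reflect method from outside the daemon using the requests library pinned elsewhere in this commit; the request envelope (params wrapped in a list, a numeric id) and the "<sd_hash>" value are assumptions rather than anything this commit specifies.

import requests
from lbrynet.conf import API_CONNECTION_STRING

# Hypothetical call to the new "reflect" method. "<sd_hash>" is a placeholder
# for a real stream descriptor hash, and the JSON-RPC envelope shape is an
# assumption, not something defined by this commit.
payload = {
    'method': 'reflect',
    'params': [{'sd_hash': '<sd_hash>'}],
    'id': 0,
}
result = requests.post(API_CONNECTION_STRING, json=payload).json()

announce_all_blobs_to_dht and get_blobs take no arguments, so the same sketch applies with 'params': [].
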
def get_lbrynet_version_from_github():
"""Return the latest released version from github."""

View file

@ -4,7 +4,7 @@ import json
from lbrynet.conf import API_CONNECTION_STRING
from jsonrpc.proxy import JSONRPCProxy
help_msg = "Useage: lbrynet-cli method json-args\n" \
help_msg = "Usage: lbrynet-cli method json-args\n" \
+ "Examples: " \
+ "lbrynet-cli resolve_name '{\"name\": \"what\"}'\n" \
+ "lbrynet-cli get_balance\n" \

View file

@ -31,6 +31,9 @@ log = logging.getLogger(__name__)
REMOTE_SERVER = "www.google.com"
if getattr(sys, 'frozen', False) and os.name == "nt":
os.environ["REQUESTS_CA_BUNDLE"] = os.path.join(os.path.dirname(sys.executable), "cacert.pem")
def test_internet_connection():
try:

View file

@ -377,6 +377,22 @@ class LBRYFileUpload(resource.Resource):
# Move to a new temporary dir and restore the original file name
newdirpath = tempfile.mkdtemp()
newpath = os.path.join(newdirpath, origfilename)
if os.name == "nt":
shutil.copy(uploaded_file.name, newpath)
# TODO Still need to remove the file
# TODO deal with pylint error in cleaner fashion than this
try:
from exceptions import WindowsError as win_except
except ImportError as e:
log.error("This shouldn't happen")
win_except = Exception
try:
os.remove(uploaded_file.name)
except win_except as e:
pass
else:
shutil.move(uploaded_file.name, newpath)
self._api.uploaded_temp_files.append(newpath)

View file

@ -47,7 +47,7 @@ class MarketFeed(object):
return defer.succeed(from_amount / (1.0 - self.fee))
def _save_price(self, price):
log.info("Saving price update %f for %s" % (price, self.market))
log.debug("Saving price update %f for %s" % (price, self.market))
self.rate = ExchangeRate(self.market, price, int(time.time()))
def _update_price(self):
@ -191,7 +191,7 @@ class DummyExchangeRateManager(object):
feed.rate = ExchangeRate(feed.market, rates[feed.market]['spot'], rates[feed.market]['ts'])
def convert_currency(self, from_currency, to_currency, amount):
log.info("Converting %f %s to %s" % (amount, from_currency, to_currency))
log.debug("Converting %f %s to %s" % (amount, from_currency, to_currency))
for market in self.market_feeds:
if market.rate.currency_pair == (from_currency, to_currency):
return amount * market.rate.spot

View file

@ -2,6 +2,7 @@ import logging
import mimetypes
import os
import sys
import random
from appdirs import user_data_dir
@ -11,8 +12,9 @@ from lbrynet.lbryfile.StreamDescriptor import publish_sd_blob
from lbrynet.core.PaymentRateManager import PaymentRateManager
from lbrynet.metadata.LBRYMetadata import Metadata
from lbrynet.lbryfilemanager.LBRYFileDownloader import ManagedLBRYFileDownloader
from lbrynet.conf import LOG_FILE_NAME
from twisted.internet import threads, defer
from lbrynet import reflector
from lbrynet.conf import LOG_FILE_NAME, REFLECTOR_SERVERS
from twisted.internet import threads, defer, reactor
if sys.platform != "darwin":
log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
@ -40,12 +42,14 @@ class Publisher(object):
self.lbry_file = None
self.txid = None
self.stream_hash = None
reflector_server = random.choice(REFLECTOR_SERVERS)
self.reflector_server, self.reflector_port = reflector_server[0], reflector_server[1]
self.metadata = {}
def start(self, name, file_path, bid, metadata):
log.info('Starting publish for %s', name)
def _show_result():
log.info("Published %s --> lbry://%s txid: %s", self.file_name, self.publish_name, self.txid)
log.info("Success! Published %s --> lbry://%s txid: %s", self.file_name, self.publish_name, self.txid)
return defer.succeed(self.txid)
self.publish_name = name
@ -53,17 +57,37 @@ class Publisher(object):
self.bid_amount = bid
self.metadata = metadata
if os.name == "nt":
file_mode = 'rb'
else:
file_mode = 'r'
d = self._check_file_path(self.file_path)
d.addCallback(lambda _: create_lbry_file(self.session, self.lbry_file_manager,
self.file_name, open(self.file_path)))
self.file_name, open(self.file_path, file_mode)))
d.addCallback(self.add_to_lbry_files)
d.addCallback(lambda _: self._create_sd_blob())
d.addCallback(lambda _: self._claim_name())
d.addCallback(lambda _: self.set_status())
d.addCallback(lambda _: self.start_reflector())
d.addCallbacks(lambda _: _show_result(), self._show_publish_error)
return d
def start_reflector(self):
reflector_server = random.choice(REFLECTOR_SERVERS)
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
log.info("Reflecting new publication")
factory = reflector.ClientFactory(
self.session.blob_manager,
self.lbry_file_manager.stream_info_manager,
self.stream_hash
)
d = reactor.resolve(reflector_address)
d.addCallback(lambda ip: reactor.connectTCP(ip, reflector_port, factory))
d.addCallback(lambda _: factory.finished_deferred)
return d
def _check_file_path(self, file_path):
def check_file_threaded():
f = open(file_path)
@ -84,10 +108,13 @@ class Publisher(object):
return d
def _create_sd_blob(self):
d = publish_sd_blob(self.lbry_file_manager.stream_info_manager, self.session.blob_manager,
log.debug('Creating stream descriptor blob')
d = publish_sd_blob(self.lbry_file_manager.stream_info_manager,
self.session.blob_manager,
self.lbry_file.stream_hash)
def set_sd_hash(sd_hash):
log.debug('stream descriptor hash: %s', sd_hash)
if 'sources' not in self.metadata:
self.metadata['sources'] = {}
self.metadata['sources']['lbry_sd_hash'] = sd_hash
@ -96,23 +123,29 @@ class Publisher(object):
return d
def set_status(self):
log.debug('Setting status')
d = self.lbry_file_manager.change_lbry_file_status(self.lbry_file, ManagedLBRYFileDownloader.STATUS_FINISHED)
d.addCallback(lambda _: self.lbry_file.restore())
return d
def _claim_name(self):
self.metadata['content_type'] = mimetypes.guess_type(os.path.join(self.lbry_file.download_directory,
self.lbry_file.file_name))[0]
self.metadata['ver'] = Metadata.current_version
log.debug('Claiming name')
self._update_metadata()
m = Metadata(self.metadata)
def set_tx_hash(txid):
log.debug('Name claimed using txid: %s', txid)
self.txid = txid
d = self.wallet.claim_name(self.publish_name, self.bid_amount, m)
d.addCallback(set_tx_hash)
return d
def _update_metadata(self):
filename = os.path.join(self.lbry_file.download_directory, self.lbry_file.file_name)
self.metadata['content_type'] = get_content_type(filename)
self.metadata['ver'] = Metadata.current_version
def _show_publish_error(self, err):
log.info(err.getTraceback())
message = "An error occurred publishing %s to %s. Error: %s."
@ -125,3 +158,7 @@ class Publisher(object):
log.error(message, str(self.file_name), str(self.publish_name), err.getTraceback())
return defer.fail(Exception("Publish failed"))
def get_content_type(filename):
return mimetypes.guess_type(filename)[0]

View file

@ -0,0 +1,2 @@
from lbrynet.reflector.server.server import ReflectorServerFactory as ServerFactory
from lbrynet.reflector.client.client import LBRYFileReflectorClientFactory as ClientFactory

View file

@ -0,0 +1,269 @@
"""
The reflector protocol (all dicts encoded in json):
Client Handshake (sent once per connection, at the start of the connection):
{
'version': 0,
}
Server Handshake (sent once per connection, after receiving the client handshake):
{
'version': 0,
}
Client Info Request:
{
'blob_hash': "<blob_hash>",
'blob_size': <blob_size>
}
Server Info Response (sent in response to Client Info Request):
{
'send_blob': True|False
}
If 'send_blob' is True, client may send a Client Blob Request or another Client Info Request.
If 'send_blob' is False, client may only send a Client Info Request
Client Blob Request:
{} # Yes, this is an empty dictionary, in case something needs to go here in the future
<raw blob_data> # this blob data must match the info sent in the most recent Client Info Request
Server Blob Response (sent in response to Client Blob Request):
{
'received_blob': True
}
Client may now send another Client Info Request
"""
import json
import logging
from twisted.protocols.basic import FileSender
from twisted.internet.protocol import Protocol, ClientFactory
from twisted.internet import defer, error
log = logging.getLogger(__name__)
class IncompleteResponseError(Exception):
pass
class LBRYFileReflectorClient(Protocol):
# Protocol stuff
def connectionMade(self):
self.blob_manager = self.factory.blob_manager
self.response_buff = ''
self.outgoing_buff = ''
self.blob_hashes_to_send = []
self.next_blob_to_send = None
self.blob_read_handle = None
self.received_handshake_response = False
self.protocol_version = None
self.file_sender = None
self.producer = None
self.streaming = False
d = self.get_blobs_to_send(self.factory.stream_info_manager, self.factory.stream_hash)
d.addCallback(lambda _: self.send_handshake())
d.addErrback(lambda err: log.warning("An error occurred immediately: %s", err.getTraceback()))
def dataReceived(self, data):
log.debug('Received %s', data)
self.response_buff += data
try:
msg = self.parse_response(self.response_buff)
except IncompleteResponseError:
pass
else:
self.response_buff = ''
d = self.handle_response(msg)
d.addCallback(lambda _: self.send_next_request())
d.addErrback(self.response_failure_handler)
def connectionLost(self, reason):
if reason.check(error.ConnectionDone):
log.debug('Finished sending data via reflector')
self.factory.finished_deferred.callback(True)
else:
log.debug('reflector finished: %s', reason)
self.factory.finished_deferred.callback(reason)
# IConsumer stuff
def registerProducer(self, producer, streaming):
self.producer = producer
self.streaming = streaming
if self.streaming is False:
from twisted.internet import reactor
reactor.callLater(0, self.producer.resumeProducing)
def unregisterProducer(self):
self.producer = None
def write(self, data):
self.transport.write(data)
if self.producer is not None and self.streaming is False:
from twisted.internet import reactor
reactor.callLater(0, self.producer.resumeProducing)
def get_blobs_to_send(self, stream_info_manager, stream_hash):
log.debug('Getting blobs from stream hash: %s', stream_hash)
d = stream_info_manager.get_blobs_for_stream(stream_hash)
def set_blobs(blob_hashes):
for blob_hash, position, iv, length in blob_hashes:
log.info("Preparing to send %s", blob_hash)
if blob_hash is not None:
self.blob_hashes_to_send.append(blob_hash)
d.addCallback(set_blobs)
d.addCallback(lambda _: stream_info_manager.get_sd_blob_hashes_for_stream(stream_hash))
def set_sd_blobs(sd_blob_hashes):
for sd_blob_hash in sd_blob_hashes:
self.blob_hashes_to_send.append(sd_blob_hash)
d.addCallback(set_sd_blobs)
return d
def send_handshake(self):
log.debug('Sending handshake')
self.write(json.dumps({'version': 0}))
def parse_response(self, buff):
try:
return json.loads(buff)
except ValueError:
raise IncompleteResponseError()
def response_failure_handler(self, err):
log.warning("An error occurred handling the response: %s", err.getTraceback())
def handle_response(self, response_dict):
if self.received_handshake_response is False:
return self.handle_handshake_response(response_dict)
else:
return self.handle_normal_response(response_dict)
def set_not_uploading(self):
if self.next_blob_to_send is not None:
self.next_blob_to_send.close_read_handle(self.read_handle)
self.read_handle = None
self.next_blob_to_send = None
self.file_sender = None
return defer.succeed(None)
def start_transfer(self):
self.write(json.dumps({}))
assert self.read_handle is not None, "self.read_handle was None when trying to start the transfer"
d = self.file_sender.beginFileTransfer(self.read_handle, self)
return d
def handle_handshake_response(self, response_dict):
if 'version' not in response_dict:
raise ValueError("Need protocol version number!")
self.protocol_version = int(response_dict['version'])
if self.protocol_version != 0:
raise ValueError("I can't handle protocol version {}!".format(self.protocol_version))
self.received_handshake_response = True
return defer.succeed(True)
def handle_normal_response(self, response_dict):
if self.file_sender is None: # Expecting Server Info Response
if 'send_blob' not in response_dict:
raise ValueError("I don't know whether to send the blob or not!")
if response_dict['send_blob'] is True:
self.file_sender = FileSender()
return defer.succeed(True)
else:
return self.set_not_uploading()
else: # Expecting Server Blob Response
if 'received_blob' not in response_dict:
raise ValueError("I don't know if the blob made it to the intended destination!")
else:
return self.set_not_uploading()
def open_blob_for_reading(self, blob):
if blob.is_validated():
read_handle = blob.open_for_reading()
if read_handle is not None:
log.debug('Getting ready to send %s', blob.blob_hash)
self.next_blob_to_send = blob
self.read_handle = read_handle
return None
raise ValueError("Couldn't open that blob for some reason. blob_hash: {}".format(blob.blob_hash))
def send_blob_info(self):
log.info("Send blob info for %s", self.next_blob_to_send.blob_hash)
assert self.next_blob_to_send is not None, "need to have a next blob to send at this point"
log.debug('sending blob info')
self.write(json.dumps({
'blob_hash': self.next_blob_to_send.blob_hash,
'blob_size': self.next_blob_to_send.length
}))
def send_next_request(self):
if self.file_sender is not None:
# send the blob
log.debug('Sending the blob')
return self.start_transfer()
elif self.blob_hashes_to_send:
# open the next blob to send
blob_hash = self.blob_hashes_to_send[0]
log.debug('No current blob, sending the next one: %s', blob_hash)
self.blob_hashes_to_send = self.blob_hashes_to_send[1:]
d = self.blob_manager.get_blob(blob_hash, True)
d.addCallback(self.open_blob_for_reading)
# send the server the next blob hash + length
d.addCallback(lambda _: self.send_blob_info())
return d
else:
# close connection
log.debug('No more blob hashes, closing connection')
self.transport.loseConnection()
class LBRYFileReflectorClientFactory(ClientFactory):
protocol = LBRYFileReflectorClient
def __init__(self, blob_manager, stream_info_manager, stream_hash):
self.blob_manager = blob_manager
self.stream_info_manager = stream_info_manager
self.stream_hash = stream_hash
self.p = None
self.finished_deferred = defer.Deferred()
def buildProtocol(self, addr):
p = self.protocol()
p.factory = self
self.p = p
return p
def startFactory(self):
log.debug('Starting reflector factory')
ClientFactory.startFactory(self)
def startedConnecting(self, connector):
log.debug('Started connecting')
def clientConnectionLost(self, connector, reason):
"""If we get disconnected, reconnect to server."""
log.debug("connection lost: %s", reason)
def clientConnectionFailed(self, connector, reason):
log.debug("connection failed: %s", reason)

View file

@ -0,0 +1,148 @@
import logging
from twisted.python import failure
from twisted.internet import error, defer
from twisted.internet.protocol import Protocol, ServerFactory
import json
from lbrynet.core.utils import is_valid_blobhash
log = logging.getLogger(__name__)
class ReflectorServer(Protocol):
def connectionMade(self):
peer_info = self.transport.getPeer()
log.debug('Connection made to %s', peer_info)
self.peer = self.factory.peer_manager.get_peer(peer_info.host, peer_info.port)
self.blob_manager = self.factory.blob_manager
self.received_handshake = False
self.peer_version = None
self.receiving_blob = False
self.incoming_blob = None
self.blob_write = None
self.blob_finished_d = None
self.cancel_write = None
self.request_buff = ""
def connectionLost(self, reason=failure.Failure(error.ConnectionDone())):
log.info("Reflector upload from %s finished" % self.peer.host)
def dataReceived(self, data):
if self.receiving_blob:
# log.debug('Writing data to blob')
self.blob_write(data)
else:
log.debug('Not yet receiving blob, data needs further processing')
self.request_buff += data
msg, extra_data = self._get_valid_response(self.request_buff)
if msg is not None:
self.request_buff = ''
d = self.handle_request(msg)
d.addCallbacks(self.send_response, self.handle_error)
if self.receiving_blob and extra_data:
log.debug('Writing extra data to blob')
self.blob_write(extra_data)
def _get_valid_response(self, response_msg):
extra_data = None
response = None
curr_pos = 0
while True:
next_close_paren = response_msg.find('}', curr_pos)
if next_close_paren != -1:
curr_pos = next_close_paren + 1
try:
response = json.loads(response_msg[:curr_pos])
except ValueError:
if curr_pos > 100:
raise Exception("error decoding response")
else:
pass
else:
extra_data = response_msg[curr_pos:]
break
else:
break
return response, extra_data
def handle_request(self, request_dict):
if self.received_handshake is False:
return self.handle_handshake(request_dict)
else:
return self.handle_normal_request(request_dict)
def handle_handshake(self, request_dict):
log.debug('Handling handshake')
if 'version' not in request_dict:
raise ValueError("Client should send version")
self.peer_version = int(request_dict['version'])
if self.peer_version != 0:
raise ValueError("I don't know that version!")
self.received_handshake = True
return defer.succeed({'version': 0})
def determine_blob_needed(self, blob):
if blob.is_validated():
return {'send_blob': False}
else:
self.incoming_blob = blob
self.blob_finished_d, self.blob_write, self.cancel_write = blob.open_for_writing(self.peer)
self.blob_finished_d.addCallback(lambda _: self.blob_manager.blob_completed(blob))
return {'send_blob': True}
def close_blob(self):
self.blob_finished_d = None
self.blob_write = None
self.cancel_write = None
self.incoming_blob = None
self.receiving_blob = False
def handle_normal_request(self, request_dict):
if self.blob_write is None:
# we haven't opened a blob yet, meaning we must be waiting for the
# next message containing a blob hash and a length. this message
# should be it. if it's one we want, open the blob for writing, and
# return a nice response dict (in a Deferred) saying go ahead
if not 'blob_hash' in request_dict or not 'blob_size' in request_dict:
raise ValueError("Expected a blob hash and a blob size")
if not is_valid_blobhash(request_dict['blob_hash']):
raise ValueError("Got a bad blob hash: {}".format(request_dict['blob_hash']))
log.debug('Received info for blob: %s', request_dict['blob_hash'])
d = self.blob_manager.get_blob(
request_dict['blob_hash'],
True,
int(request_dict['blob_size'])
)
d.addCallback(self.determine_blob_needed)
else:
# we have a blob open already, so this message should have nothing
# important in it. to the deferred that fires when the blob is done,
# add a callback which returns a nice response dict saying to keep
# sending, and then return that deferred
log.debug('blob is already open')
self.receiving_blob = True
d = self.blob_finished_d
d.addCallback(lambda _: self.close_blob())
d.addCallback(lambda _: {'received_blob': True})
return d
def send_response(self, response_dict):
self.transport.write(json.dumps(response_dict))
def handle_error(self, err):
log.error(err.getTraceback())
self.transport.loseConnection()
class ReflectorServerFactory(ServerFactory):
protocol = ReflectorServer
def __init__(self, peer_manager, blob_manager):
self.peer_manager = peer_manager
self.blob_manager = blob_manager
def buildProtocol(self, addr):
log.debug('Creating a protocol for %s', addr)
return ServerFactory.buildProtocol(self, addr)
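
The trickiest part of the server above is _get_valid_response(), which has to split a TCP chunk that may contain a JSON request immediately followed by raw blob bytes. Here is a standalone sketch of the same scan-for-'}' approach (the original's give-up guard after 100 bytes is omitted for brevity), exercised on made-up payload bytes:

import json

def split_request(buff):
    # Mirrors ReflectorServer._get_valid_response: grow the candidate JSON prefix
    # one '}' at a time until it parses, then treat whatever follows as blob data.
    curr_pos = 0
    while True:
        next_close_paren = buff.find('}', curr_pos)
        if next_close_paren == -1:
            return None, None        # request not complete yet; wait for more data
        curr_pos = next_close_paren + 1
        try:
            msg = json.loads(buff[:curr_pos])
        except ValueError:
            continue                 # that '}' was inside a value; keep scanning
        return msg, buff[curr_pos:]

# Example chunk: a Client Info Request with (made-up) blob bytes appended.
chunk = json.dumps({'blob_hash': 'ab' * 48, 'blob_size': 3}) + '\x00\x01\x02'
msg, extra = split_request(chunk)
assert msg == {'blob_hash': 'ab' * 48, 'blob_size': 3}
assert extra == '\x00\x01\x02'
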

View file

@ -1,5 +1,5 @@
[Desktop Entry]
Version=0.3.19
Version=0.3.21
Name=LBRY
Comment=The world's first user-owned content marketplace
Icon=lbry

View file

@ -0,0 +1,287 @@
import logging
import os
import socket
import sys
import threading
import webbrowser
from twisted.internet import reactor, error
from twisted.web import server
import win32api
import win32con
import win32gui_struct
try:
import winxpgui as win32gui
except ImportError:
import win32gui
from lbrynet.lbrynet_daemon.LBRYDaemonServer import LBRYDaemonServer, LBRYDaemonRequest
from lbrynet.conf import API_PORT, API_INTERFACE, ICON_PATH, APP_NAME
from lbrynet.conf import UI_ADDRESS
if getattr(sys, 'frozen', False) and os.name == "nt":
os.environ["REQUESTS_CA_BUNDLE"] = os.path.join(os.path.dirname(sys.executable), "cacert.pem")
log = logging.getLogger(__name__)
REMOTE_SERVER = "www.google.com"
def test_internet_connection():
try:
host = socket.gethostbyname(REMOTE_SERVER)
s = socket.create_connection((host, 80), 2)
return True
except:
return False
def non_string_iterable(obj):
try:
iter(obj)
except TypeError:
return False
else:
return not isinstance(obj, basestring)
class SysTrayIcon(object):
"""TODO"""
QUIT = 'QUIT'
SPECIAL_ACTIONS = [QUIT]
FIRST_ID = 1023
def __init__(self,
icon,
hover_text,
menu_options,
on_quit=None,
default_menu_index=None,
window_class_name=None, ):
self.icon = icon
self.hover_text = hover_text
self.on_quit = on_quit
menu_options = menu_options + (('Quit', None, self.QUIT),)
self._next_action_id = self.FIRST_ID
self.menu_actions_by_id = set()
self.menu_options = self._add_ids_to_menu_options(list(menu_options))
self.menu_actions_by_id = dict(self.menu_actions_by_id)
del self._next_action_id
self.default_menu_index = (default_menu_index or 0)
self.window_class_name = window_class_name or "SysTrayIconPy"
message_map = {win32gui.RegisterWindowMessage("TaskbarCreated"): self.restart,
win32con.WM_DESTROY: self.destroy,
win32con.WM_COMMAND: self.command,
win32con.WM_USER + 20: self.notify,}
# Register the Window class.
window_class = win32gui.WNDCLASS()
hinst = window_class.hInstance = win32gui.GetModuleHandle(None)
window_class.lpszClassName = self.window_class_name
window_class.style = win32con.CS_VREDRAW | win32con.CS_HREDRAW;
window_class.hCursor = win32gui.LoadCursor(0, win32con.IDC_ARROW)
window_class.hbrBackground = win32con.COLOR_WINDOW
window_class.lpfnWndProc = message_map # could also specify a wndproc.
classAtom = win32gui.RegisterClass(window_class)
# Create the Window.
style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU
self.hwnd = win32gui.CreateWindow(classAtom,
self.window_class_name,
style,
0,
0,
win32con.CW_USEDEFAULT,
win32con.CW_USEDEFAULT,
0,
0,
hinst,
None)
win32gui.UpdateWindow(self.hwnd)
self.notify_id = None
self.refresh_icon()
win32gui.PumpMessages()
def _add_ids_to_menu_options(self, menu_options):
result = []
for menu_option in menu_options:
option_text, option_icon, option_action = menu_option
if callable(option_action) or option_action in self.SPECIAL_ACTIONS:
self.menu_actions_by_id.add((self._next_action_id, option_action))
result.append(menu_option + (self._next_action_id,))
elif non_string_iterable(option_action):
result.append((option_text,
option_icon,
self._add_ids_to_menu_options(option_action),
self._next_action_id))
else:
print 'Unknown item', option_text, option_icon, option_action
self._next_action_id += 1
return result
def refresh_icon(self):
# Try and find a custom icon
hinst = win32gui.GetModuleHandle(None)
if os.path.isfile(self.icon):
icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE
hicon = win32gui.LoadImage(hinst,
self.icon,
win32con.IMAGE_ICON,
0,
0,
icon_flags)
else:
print "Can't find icon file - using default."
hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION)
if self.notify_id:
message = win32gui.NIM_MODIFY
else:
message = win32gui.NIM_ADD
self.notify_id = (self.hwnd,
0,
win32gui.NIF_ICON | win32gui.NIF_MESSAGE | win32gui.NIF_TIP,
win32con.WM_USER + 20,
hicon,
self.hover_text)
win32gui.Shell_NotifyIcon(message, self.notify_id)
def restart(self, hwnd, msg, wparam, lparam):
self.refresh_icon()
def destroy(self, hwnd, msg, wparam, lparam):
if self.on_quit: self.on_quit(self)
nid = (self.hwnd, 0)
win32gui.Shell_NotifyIcon(win32gui.NIM_DELETE, nid)
win32gui.PostQuitMessage(0) # Terminate the app.
def notify(self, hwnd, msg, wparam, lparam):
if lparam == win32con.WM_LBUTTONDBLCLK:
self.execute_menu_option(self.default_menu_index + self.FIRST_ID)
elif lparam == win32con.WM_RBUTTONUP:
self.show_menu()
elif lparam == win32con.WM_LBUTTONUP:
pass
return True
def show_menu(self):
menu = win32gui.CreatePopupMenu()
self.create_menu(menu, self.menu_options)
# win32gui.SetMenuDefaultItem(menu, 1000, 0)
pos = win32gui.GetCursorPos()
# See http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/menus_0hdi.asp
win32gui.SetForegroundWindow(self.hwnd)
win32gui.TrackPopupMenu(menu,
win32con.TPM_LEFTALIGN,
pos[0],
pos[1],
0,
self.hwnd,
None)
win32gui.PostMessage(self.hwnd, win32con.WM_NULL, 0, 0)
def create_menu(self, menu, menu_options):
for option_text, option_icon, option_action, option_id in menu_options[::-1]:
if option_icon:
option_icon = self.prep_menu_icon(option_icon)
if option_id in self.menu_actions_by_id:
item, extras = win32gui_struct.PackMENUITEMINFO(text=option_text,
hbmpItem=option_icon,
wID=option_id)
win32gui.InsertMenuItem(menu, 0, 1, item)
else:
submenu = win32gui.CreatePopupMenu()
self.create_menu(submenu, option_action)
item, extras = win32gui_struct.PackMENUITEMINFO(text=option_text,
hbmpItem=option_icon,
hSubMenu=submenu)
win32gui.InsertMenuItem(menu, 0, 1, item)
def prep_menu_icon(self, icon):
# First load the icon.
ico_x = win32api.GetSystemMetrics(win32con.SM_CXSMICON)
ico_y = win32api.GetSystemMetrics(win32con.SM_CYSMICON)
hicon = win32gui.LoadImage(0, icon, win32con.IMAGE_ICON, ico_x, ico_y, win32con.LR_LOADFROMFILE)
hdcBitmap = win32gui.CreateCompatibleDC(0)
hdcScreen = win32gui.GetDC(0)
hbm = win32gui.CreateCompatibleBitmap(hdcScreen, ico_x, ico_y)
hbmOld = win32gui.SelectObject(hdcBitmap, hbm)
# Fill the background.
brush = win32gui.GetSysColorBrush(win32con.COLOR_MENU)
win32gui.FillRect(hdcBitmap, (0, 0, 16, 16), brush)
# unclear if brush needs to be freed. Best clue I can find is:
# "GetSysColorBrush returns a cached brush instead of allocating a new
# one." - implies no DeleteObject
# draw the icon
win32gui.DrawIconEx(hdcBitmap, 0, 0, hicon, ico_x, ico_y, 0, 0, win32con.DI_NORMAL)
win32gui.SelectObject(hdcBitmap, hbmOld)
win32gui.DeleteDC(hdcBitmap)
return hbm
def command(self, hwnd, msg, wparam, lparam):
id = win32gui.LOWORD(wparam)
self.execute_menu_option(id)
def execute_menu_option(self, id):
menu_action = self.menu_actions_by_id[id]
if menu_action == self.QUIT:
self.exit_app()
else:
menu_action(self)
def exit_app(self):
win32gui.DestroyWindow(self.hwnd)
def main():
def LBRYApp():
return SysTrayIcon(icon, hover_text, menu_options, on_quit=stop)
def openui_(sender):
webbrowser.open(UI_ADDRESS)
def replyToApplicationShouldTerminate_():
try:
reactor.stop()
except error.ReactorNotRunning:
log.debug('Reactor already stopped')
def stop(sysTrayIcon):
replyToApplicationShouldTerminate_()
if getattr(sys, 'frozen', False) and os.name == "nt":
icon = os.path.join(os.path.dirname(sys.executable), ICON_PATH, 'lbry16.ico')
else:
icon = os.path.join(ICON_PATH, 'lbry16.ico')
hover_text = APP_NAME
menu_options = (('Open', icon, openui_),)
if not test_internet_connection():
log.warn('No Internet Connection')
sys.exit(1)
systray_thread = threading.Thread(target=LBRYApp)
systray_thread.daemon = True
systray_thread.start()
lbry = LBRYDaemonServer()
d = lbry.start()
d.addCallback(lambda _: webbrowser.open(UI_ADDRESS))
lbrynet_server = server.Site(lbry.root)
lbrynet_server.requestFactory = LBRYDaemonRequest
reactor.listenTCP(API_PORT, lbrynet_server, interface=API_INTERFACE)
reactor.run()
if __name__ == '__main__':
main()

Binary files (not shown): six added images of 97 KiB, 1.1 KiB, 361 KiB, 5.3 KiB, 15 KiB and 31 KiB, plus one other binary file.

View file

@ -3,12 +3,25 @@
To create local builds and distributable .msi, run the following command:
python setup_win32.py build bdist_msi
"""
import opcode
import os
import pkg_resources
import sys
from cx_Freeze import setup, Executable
import requests.certs
from lbrynet import __version__
win_icon = os.path.join('packaging', 'windows', 'lbry-win32-app', 'icons', 'lbry256.ico')
wordlist_path = pkg_resources.resource_filename('lbryum', 'wordlist')
base_dir = os.path.abspath(os.path.dirname(__file__))
# Allow virtualenv to find distutils of base python installation
distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
def find_data_file(filename):
if getattr(sys, 'frozen', False):
# The application is frozen
@ -19,58 +32,102 @@ def find_data_file(filename):
data_dir = os.path.dirname(__file__)
return os.path.join(data_dir, filename)
shortcut_table = [
('DesktopShortcut', # Shortcut
'DesktopFolder', # Directory
'LBRY', # Name
'TARGETDIR', # Component
'[TARGETDIR]\LBRY.exe', # Target
None, # Arguments
None, # Description
None, # Hotkey
os.path.join('lbrynet', 'lbrynet_gui', 'lbry-dark-icon.ico'), # Icon
None, # IconIndex
None, # ShowCmd
'TARGETDIR', # WkDir
),
]
console_scripts = ['lbrynet-stdin-uploader = lbrynet.lbrynet_console.LBRYStdinUploader:launch_stdin_uploader',
'lbrynet-stdout-downloader = lbrynet.lbrynet_console.LBRYStdoutDownloader:launch_stdout_downloader',
'lbrynet-create-network = lbrynet.create_network:main',
'lbrynet-launch-node = lbrynet.dht.node:main',
'lbrynet-launch-rpc-node = lbrynet.rpc_node:main',
'lbrynet-rpc-node-cli = lbrynet.node_rpc_cli:main',
'lbrynet-lookup-hosts-for-hash = lbrynet.dht_scripts:get_hosts_for_hash_in_dht',
'lbrynet-announce_hash_to_dht = lbrynet.dht_scripts:announce_hash_to_dht',
'lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:start',
'stop-lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:stop',
'lbrynet-cli = lbrynet.lbrynet_daemon.LBRYDaemonCLI:main']
# Now create the table dictionary
msi_data = {'Shortcut': shortcut_table}
# shortcut_table = [
# ('DesktopShortcut', # Shortcut
# 'DesktopFolder', # Directory
# 'LBRY 1', # Name
# 'TARGETDIR', # Component
# '[TARGETDIR]\LBRY.exe', # Target
# None, # Arguments
# None, # Description
# None, # Hotkey
# win_icon, # Icon
# None, # IconIndex
# None, # ShowCmd
# 'TARGETDIR', # WkDir
# ),
# ]
#
# # Now create the table dictionary
# msi_data = {'Shortcut': shortcut_table}
bdist_msi_options = {
'upgrade_code': '{66620F3A-DC3A-11E2-B341-002219E9B01F}',
# 'upgrade_code': '{66620F3A-DC3A-11E2-B341-002219E9B01F}',
'add_to_path': False,
'initial_target_dir': r'[LocalAppDataFolder]\LBRY',
'data': msi_data,
# 'data': msi_data,
}
build_exe_options = {
'include_msvcr': True,
'includes': [],
'packages': ['six', 'os', 'twisted', 'miniupnpc', 'unqlite', 'seccure',
'requests', 'bitcoinrpc', 'txjsonrpc', 'win32api', 'Crypto',
'gmpy', 'yapsy', 'lbryum', 'google.protobuf'],
'excludes': ['zope.interface._zope_interface_coptimizations'],
'include_files': [os.path.join('lbrynet', 'lbrynet_gui', 'close.gif'),
os.path.join('lbrynet', 'lbrynet_gui', 'close1.png'),
os.path.join('lbrynet', 'lbrynet_gui', 'close2.gif'),
os.path.join('lbrynet', 'lbrynet_gui', 'drop_down.gif'),
os.path.join('lbrynet', 'lbrynet_gui', 'hide_options.gif'),
os.path.join('lbrynet', 'lbrynet_gui', 'lbry-dark-242x80.gif'),
os.path.join('lbrynet', 'lbrynet_gui', 'lbry-dark-icon.ico'),
os.path.join('lbrynet', 'lbrynet_gui', 'lbry-dark-icon.xbm'),
os.path.join('lbrynet', 'lbrynet_gui', 'show_options.gif'),
os.path.join('lbrycrdd.exe'), # Not included in repo
os.path.join('lbrycrd-cli.exe'), # Not included in repo
(requests.certs.where(), 'cacert.pem'),
'packages': ['cython',
'twisted',
'yapsy',
'appdirs',
'argparse',
'base58',
'colorama',
'cx_Freeze',
'dns',
'ecdsa',
'gmpy',
'googlefinance',
'jsonrpc',
'jsonrpclib',
'lbryum',
'loggly',
'miniupnpc',
'pbkdf2',
'google.protobuf',
'Crypto',
'bitcoinrpc',
'win32api',
'qrcode',
'requests',
'requests_futures',
'seccure',
'simplejson',
'six',
'aes',
'txjsonrpc',
'unqlite',
'wsgiref',
'zope.interface',
'os',
'pkg_resources'
],
'namespace_packages': ['zope']}
'excludes': ['distutils', 'collections.sys', 'collections._weakref', 'collections.abc',
'Tkinter', 'tk', 'tcl', 'PyQt4', 'nose', 'mock',
'zope.interface._zope_interface_coptimizations'],
'include_files': [(distutils_path, 'distutils'), (requests.certs.where(), 'cacert.pem'),
(os.path.join('packaging', 'windows', 'lbry-win32-app', 'icons', 'lbry16.ico'),
os.path.join('icons', 'lbry16.ico')),
(os.path.join(wordlist_path, 'chinese_simplified.txt'),
os.path.join('wordlist', 'chinese_simplified.txt')),
(os.path.join(wordlist_path, 'english.txt'), os.path.join('wordlist', 'english.txt')),
(os.path.join(wordlist_path, 'japanese.txt'), os.path.join('wordlist', 'japanese.txt')),
(os.path.join(wordlist_path, 'portuguese.txt'), os.path.join('wordlist', 'portuguese.txt')),
(os.path.join(wordlist_path, 'spanish.txt'), os.path.join('wordlist', 'spanish.txt'))
],
'namespace_packages': ['zope', 'google']}
exe = Executable(
script=os.path.join('lbrynet', 'lbrynet_gui', 'gui.py'),
script=os.path.join('packaging', 'windows', 'lbry-win32-app', 'LBRYWin32App.py'),
base='Win32GUI',
icon=os.path.join('lbrynet', 'lbrynet_gui', 'lbry-dark-icon.ico'),
icon=win_icon,
compress=True,
shortcutName='LBRY',
shortcutDir='DesktopFolder',
@ -80,11 +137,12 @@ exe = Executable(
setup(
name='LBRY',
version='0.0.4',
description='A fully decentralized network for distributing data',
version=__version__,
description='A decentralized media library and marketplace',
url='lbry.io',
author='',
author='LBRY, Inc.',
keywords='LBRY',
data_files=[],
options={'build_exe': build_exe_options,
'bdist_msi': bdist_msi_options},
executables=[exe],

View file

@ -0,0 +1,190 @@
import os
import shutil
from twisted.internet import defer, threads, error
from twisted.trial import unittest
from lbrynet import conf
from lbrynet import lbryfile
from lbrynet import reflector
from lbrynet.core import BlobManager
from lbrynet.core import PeerManager
from lbrynet.core import RateLimiter
from lbrynet.core import Session
from lbrynet.core import StreamDescriptor
from lbrynet.lbryfile import LBRYFileMetadataManager
from lbrynet.lbryfile.client import LBRYFileOptions
from lbrynet.lbryfilemanager import LBRYFileCreator
from lbrynet.lbryfilemanager import LBRYFileManager
from tests import mocks
class TestReflector(unittest.TestCase):
def setUp(self):
self.session = None
self.stream_info_manager = None
self.lbry_file_manager = None
self.server_blob_manager = None
self.reflector_port = None
self.addCleanup(self.take_down_env)
def take_down_env(self):
d = defer.succeed(True)
if self.lbry_file_manager is not None:
d.addCallback(lambda _: self.lbry_file_manager.stop())
if self.session is not None:
d.addCallback(lambda _: self.session.shut_down())
if self.stream_info_manager is not None:
d.addCallback(lambda _: self.stream_info_manager.stop())
if self.server_blob_manager is not None:
d.addCallback(lambda _: self.server_blob_manager.stop())
if self.reflector_port is not None:
d.addCallback(lambda _: self.reflector_port.stopListening())
def delete_test_env():
shutil.rmtree('client')
d.addCallback(lambda _: threads.deferToThread(delete_test_env))
return d
def test_reflector(self):
wallet = mocks.Wallet()
peer_manager = PeerManager.PeerManager()
peer_finder = mocks.PeerFinder(5553, peer_manager, 2)
hash_announcer = mocks.Announcer()
rate_limiter = RateLimiter.DummyRateLimiter()
sd_identifier = StreamDescriptor.StreamDescriptorIdentifier()
self.expected_blobs = [
(
'dc4708f76a5e7af0f1cae0ee96b824e2ed9250c9346c093b'
'441f0a20d3607c17948b6fcfb4bc62020fe5286693d08586',
2097152
),
(
'f4067522c1b49432a2a679512e3917144317caa1abba0c04'
'1e0cd2cf9f635d4cf127ce1824fa04189b63916174951f70',
2097152
),
(
'305486c434260484fcb2968ce0e963b72f81ba56c11b08b1'
'af0789b55b44d78422600f9a38e3cf4f2e9569897e5646a9',
1015056
),
]
db_dir = "client"
os.mkdir(db_dir)
self.session = Session.LBRYSession(
conf.MIN_BLOB_DATA_PAYMENT_RATE,
db_dir=db_dir,
lbryid="abcd",
peer_finder=peer_finder,
hash_announcer=hash_announcer,
blob_dir=None,
peer_port=5553,
use_upnp=False,
rate_limiter=rate_limiter,
wallet=wallet
)
self.stream_info_manager = LBRYFileMetadataManager.TempLBRYFileMetadataManager()
self.lbry_file_manager = LBRYFileManager.LBRYFileManager(
self.session, self.stream_info_manager, sd_identifier)
self.server_blob_manager = BlobManager.TempBlobManager(hash_announcer)
d = self.session.setup()
d.addCallback(lambda _: self.stream_info_manager.setup())
d.addCallback(lambda _: LBRYFileOptions.add_lbry_file_to_sd_identifier(sd_identifier))
d.addCallback(lambda _: self.lbry_file_manager.setup())
d.addCallback(lambda _: self.server_blob_manager.setup())
def verify_equal(sd_info):
self.assertEqual(mocks.create_stream_sd_file, sd_info)
def save_sd_blob_hash(sd_hash):
self.expected_blobs.append((sd_hash, 923))
def verify_stream_descriptor_file(stream_hash):
d = lbryfile.get_sd_info(self.lbry_file_manager.stream_info_manager, stream_hash, True)
d.addCallback(verify_equal)
d.addCallback(
lambda _: lbryfile.publish_sd_blob(
self.lbry_file_manager.stream_info_manager,
self.session.blob_manager, stream_hash
)
)
d.addCallback(save_sd_blob_hash)
d.addCallback(lambda _: stream_hash)
return d
def create_stream():
test_file = mocks.GenFile(5209343, b''.join([chr(i + 3) for i in xrange(0, 64, 6)]))
d = LBRYFileCreator.create_lbry_file(
self.session,
self.lbry_file_manager,
"test_file",
test_file,
key="0123456701234567",
iv_generator=iv_generator()
)
return d
def start_server():
server_factory = reflector.ServerFactory(peer_manager, self.server_blob_manager)
from twisted.internet import reactor
port = 8943
while self.reflector_port is None:
try:
self.reflector_port = reactor.listenTCP(port, server_factory)
except error.CannotListenError:
port += 1
return defer.succeed(port)
def send_to_server(port, stream_hash):
factory = reflector.ClientFactory(
self.session.blob_manager,
self.stream_info_manager,
stream_hash
)
from twisted.internet import reactor
reactor.connectTCP('localhost', port, factory)
return factory.finished_deferred
def verify_blob_completed(blob, blob_size):
self.assertTrue(blob.is_validated())
self.assertEqual(blob_size, blob.length)
def verify_have_blob(blob_hash, blob_size):
d = self.server_blob_manager.get_blob(blob_hash, True)
d.addCallback(lambda blob: verify_blob_completed(blob, blob_size))
return d
def verify_data_on_reflector():
check_blob_ds = []
for blob_hash, blob_size in self.expected_blobs:
check_blob_ds.append(verify_have_blob(blob_hash, blob_size))
return defer.DeferredList(check_blob_ds)
def upload_to_reflector(stream_hash):
d = start_server()
d.addCallback(lambda port: send_to_server(port, stream_hash))
d.addCallback(lambda _: verify_data_on_reflector())
return d
d.addCallback(lambda _: create_stream())
d.addCallback(verify_stream_descriptor_file)
d.addCallback(upload_to_reflector)
return d
def iv_generator():
iv = 0
while True:
iv += 1
yield "%016d" % iv

View file

@ -26,7 +26,7 @@ from lbrynet.core.StreamDescriptor import download_sd_blob
from lbrynet.lbryfilemanager.LBRYFileCreator import create_lbry_file
from lbrynet.lbryfile.client.LBRYFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.lbryfile.StreamDescriptor import get_sd_info
from twisted.internet import defer, threads, task
from twisted.internet import defer, threads, task, error
from twisted.trial.unittest import TestCase
from twisted.python.failure import Failure
import os
@ -38,6 +38,10 @@ from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.lbrylive.server.LiveBlobInfoQueryHandler import CryptBlobInfoQueryHandlerFactory
from lbrynet.lbrylive.client.LiveStreamOptions import add_live_stream_to_sd_identifier
from lbrynet.lbrylive.client.LiveStreamDownloader import add_full_live_stream_downloader_to_sd_identifier
from lbrynet.core.BlobManager import TempBlobManager
from lbrynet.reflector.client.client import LBRYFileReflectorClientFactory
from lbrynet.reflector.server.server import ReflectorServerFactory
from lbrynet.lbryfile.StreamDescriptor import publish_sd_blob
log_format = "%(funcName)s(): %(message)s"
@ -1370,3 +1374,146 @@ class TestStreamify(TestCase):
d.addCallback(lambda _: create_stream())
d.addCallback(combine_stream)
return d
class TestReflector(TestCase):
def setUp(self):
self.session = None
self.stream_info_manager = None
self.lbry_file_manager = None
self.server_blob_manager = None
self.reflector_port = None
self.addCleanup(self.take_down_env)
def take_down_env(self):
d = defer.succeed(True)
if self.lbry_file_manager is not None:
d.addCallback(lambda _: self.lbry_file_manager.stop())
if self.session is not None:
d.addCallback(lambda _: self.session.shut_down())
if self.stream_info_manager is not None:
d.addCallback(lambda _: self.stream_info_manager.stop())
if self.server_blob_manager is not None:
d.addCallback(lambda _: self.server_blob_manager.stop())
if self.reflector_port is not None:
d.addCallback(lambda _: self.reflector_port.stopListening())
def delete_test_env():
shutil.rmtree('client')
d.addCallback(lambda _: threads.deferToThread(delete_test_env))
return d
def test_reflector(self):
wallet = FakeWallet()
peer_manager = PeerManager()
peer_finder = FakePeerFinder(5553, peer_manager, 2)
hash_announcer = FakeAnnouncer()
rate_limiter = DummyRateLimiter()
sd_identifier = StreamDescriptorIdentifier()
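        # Blob hashes and sizes that the deterministic GenFile input below is
        # expected to produce: three content blobs here, plus the stream descriptor
        # blob whose hash is appended by save_sd_blob_hash after it is published.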
self.expected_blobs = [
('dc4708f76a5e7af0f1cae0ee96b824e2ed9250c9346c093b'
'441f0a20d3607c17948b6fcfb4bc62020fe5286693d08586', 2097152),
('f4067522c1b49432a2a679512e3917144317caa1abba0c04'
'1e0cd2cf9f635d4cf127ce1824fa04189b63916174951f70', 2097152),
('305486c434260484fcb2968ce0e963b72f81ba56c11b08b1'
'af0789b55b44d78422600f9a38e3cf4f2e9569897e5646a9', 1015056),
]
db_dir = "client"
os.mkdir(db_dir)
self.session = LBRYSession(MIN_BLOB_DATA_PAYMENT_RATE, db_dir=db_dir, lbryid="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=None, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet)
self.stream_info_manager = TempLBRYFileMetadataManager()
self.lbry_file_manager = LBRYFileManager(self.session, self.stream_info_manager, sd_identifier)
self.server_blob_manager = TempBlobManager(hash_announcer)
d = self.session.setup()
d.addCallback(lambda _: self.stream_info_manager.setup())
d.addCallback(lambda _: add_lbry_file_to_sd_identifier(sd_identifier))
d.addCallback(lambda _: self.lbry_file_manager.setup())
d.addCallback(lambda _: self.server_blob_manager.setup())
def verify_equal(sd_info):
self.assertEqual(sd_info, test_create_stream_sd_file)
def save_sd_blob_hash(sd_hash):
self.expected_blobs.append((sd_hash, 923))
def verify_stream_descriptor_file(stream_hash):
d = get_sd_info(self.lbry_file_manager.stream_info_manager, stream_hash, True)
d.addCallback(verify_equal)
            d.addCallback(
                lambda _: publish_sd_blob(
                    self.lbry_file_manager.stream_info_manager,
                    self.session.blob_manager, stream_hash
                )
            )
d.addCallback(save_sd_blob_hash)
d.addCallback(lambda _: stream_hash)
return d
def iv_generator():
iv = 0
            while True:
iv += 1
yield "%016d" % iv
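        # iv_generator yields zero-padded 16-digit counters ("0000000000000001",
        # "0000000000000002", ...), which is exactly what the hard-coded 'iv' hex
        # strings in the expected stream descriptor decode to.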
def create_stream():
test_file = GenFile(5209343, b''.join([chr(i + 3) for i in xrange(0, 64, 6)]))
d = create_lbry_file(self.session, self.lbry_file_manager, "test_file", test_file,
key="0123456701234567", iv_generator=iv_generator())
return d
def start_server():
server_factory = ReflectorServerFactory(peer_manager, self.server_blob_manager)
from twisted.internet import reactor
port = 8943
while self.reflector_port is None:
try:
self.reflector_port = reactor.listenTCP(port, server_factory)
except error.CannotListenError:
port += 1
return defer.succeed(port)
def send_to_server(port, stream_hash):
factory = LBRYFileReflectorClientFactory(
self.session.blob_manager,
self.stream_info_manager,
stream_hash
)
from twisted.internet import reactor
reactor.connectTCP('localhost', port, factory)
return factory.finished_deferred
def verify_blob_completed(blob, blob_size):
self.assertTrue(blob.is_validated())
self.assertEqual(blob_size, blob.length)
def verify_have_blob(blob_hash, blob_size):
d = self.server_blob_manager.get_blob(blob_hash, True)
d.addCallback(lambda blob: verify_blob_completed(blob, blob_size))
return d
def verify_data_on_reflector():
check_blob_ds = []
for blob_hash, blob_size in self.expected_blobs:
check_blob_ds.append(verify_have_blob(blob_hash, blob_size))
return defer.DeferredList(check_blob_ds)
def upload_to_reflector(stream_hash):
d = start_server()
d.addCallback(lambda port: send_to_server(port, stream_hash))
d.addCallback(lambda _: verify_data_on_reflector())
return d
d.addCallback(lambda _: create_stream())
d.addCallback(verify_stream_descriptor_file)
d.addCallback(upload_to_reflector)
return d
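Condensed for reference, the round trip that TestReflector exercises looks roughly like the sketch below. It is not part of the diff: it only reuses names that appear in this changeset (ReflectorServerFactory, LBRYFileReflectorClientFactory, finished_deferred) and assumes the blob manager, stream info manager and stream hash have already been set up as in the test.

# Sketch only: reflect one stream to a local reflector server.
from twisted.internet import reactor

server_factory = ReflectorServerFactory(peer_manager, server_blob_manager)
listening_port = reactor.listenTCP(8943, server_factory)

client_factory = LBRYFileReflectorClientFactory(blob_manager, stream_info_manager, stream_hash)
reactor.connectTCP('localhost', 8943, client_factory)

d = client_factory.finished_deferred
d.addCallback(lambda _: listening_port.stopListening())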

164
tests/mocks.py Normal file
View file

@ -0,0 +1,164 @@
import io
from Crypto.PublicKey import RSA
from twisted.internet import defer, threads, task, error
from lbrynet.core import PTCWallet
class Node(object):
def __init__(self, *args, **kwargs):
pass
def joinNetwork(self, *args):
pass
def stop(self):
pass
class Wallet(object):
def __init__(self):
self.private_key = RSA.generate(1024)
self.encoded_public_key = self.private_key.publickey().exportKey()
def start(self):
return defer.succeed(True)
def stop(self):
return defer.succeed(True)
def get_info_exchanger(self):
return PTCWallet.PointTraderKeyExchanger(self)
def get_wallet_info_query_handler_factory(self):
return PTCWallet.PointTraderKeyQueryHandlerFactory(self)
def reserve_points(self, *args):
return True
def cancel_point_reservation(self, *args):
pass
def send_points(self, *args):
return defer.succeed(True)
def add_expected_payment(self, *args):
pass
def get_balance(self):
return defer.succeed(1000)
def set_public_key_for_peer(self, peer, public_key):
pass
class PeerFinder(object):
def __init__(self, start_port, peer_manager, num_peers):
self.start_port = start_port
self.peer_manager = peer_manager
self.num_peers = num_peers
self.count = 0
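    # find_peers_for_blob hands out fake peers on consecutive ports starting at
    # start_port, cycling back to the first port after num_peers calls.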
def find_peers_for_blob(self, *args):
peer_port = self.start_port + self.count
self.count += 1
if self.count >= self.num_peers:
self.count = 0
return defer.succeed([self.peer_manager.get_peer("127.0.0.1", peer_port)])
def run_manage_loop(self):
pass
def stop(self):
pass
class Announcer(object):
def __init__(self, *args):
pass
def add_supplier(self, supplier):
pass
def immediate_announce(self, *args):
pass
def run_manage_loop(self):
pass
def stop(self):
pass
class GenFile(io.RawIOBase):
def __init__(self, size, pattern):
io.RawIOBase.__init__(self)
self.size = size
self.pattern = pattern
self.read_so_far = 0
self.buff = b''
self.last_offset = 0
def readable(self):
return True
def writable(self):
return False
def read(self, n=-1):
if n > -1:
bytes_to_read = min(n, self.size - self.read_so_far)
else:
bytes_to_read = self.size - self.read_so_far
output, self.buff = self.buff[:bytes_to_read], self.buff[bytes_to_read:]
bytes_to_read -= len(output)
while bytes_to_read > 0:
self.buff = self._generate_chunk()
new_output, self.buff = self.buff[:bytes_to_read], self.buff[bytes_to_read:]
bytes_to_read -= len(new_output)
output += new_output
self.read_so_far += len(output)
return output
def readall(self):
return self.read()
def _generate_chunk(self, n=2**10):
output = self.pattern[self.last_offset:self.last_offset + n]
n_left = n - len(output)
        whole_patterns = n_left // len(self.pattern)
output += self.pattern * whole_patterns
self.last_offset = n - len(output)
output += self.pattern[:self.last_offset]
return output
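# GenFile repeats `pattern` deterministically until `size` bytes have been read,
# so a given (size, pattern) pair always produces the same blobs; that is what
# lets create_stream_sd_file below hard-code the blob hashes for the 5209343-byte
# test stream used by the tests.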
create_stream_sd_file = {
'stream_name': '746573745f66696c65',
'blobs': [
{
'length': 2097152,
'blob_num': 0,
'blob_hash': 'dc4708f76a5e7af0f1cae0ee96b824e2ed9250c9346c093b441f0a20d3607c17948b6fcfb4bc62020fe5286693d08586',
'iv': '30303030303030303030303030303031'
},
{
'length': 2097152,
'blob_num': 1,
'blob_hash': 'f4067522c1b49432a2a679512e3917144317caa1abba0c041e0cd2cf9f635d4cf127ce1824fa04189b63916174951f70',
'iv': '30303030303030303030303030303032'
},
{
'length': 1015056,
'blob_num': 2,
'blob_hash': '305486c434260484fcb2968ce0e963b72f81ba56c11b08b1af0789b55b44d78422600f9a38e3cf4f2e9569897e5646a9',
'iv': '30303030303030303030303030303033'
},
{'length': 0, 'blob_num': 3, 'iv': '30303030303030303030303030303034'}
],
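    # 'stream_name' and 'suggested_file_name' are hex for "test_file", 'key' is hex
    # for "0123456701234567", and the empty blob_num 3 entry above marks the end of
    # the stream.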
'stream_type': 'lbryfile',
'key': '30313233343536373031323334353637',
'suggested_file_name': '746573745f66696c65',
'stream_hash': '6d27fbe10c86d81aacfb897c7a426d0a2214f5a299455a6d315c0f998c4b3545c2dc60906122d94653c23b1898229e3f'
}

View file

@ -19,12 +19,10 @@ class LBRYFeeFormatTest(unittest.TestCase):
class LBRYFeeTest(unittest.TestCase):
def setUp(self):
self.patcher = mock.patch('time.time')
self.time = self.patcher.start()
patcher = mock.patch('time.time')
self.time = patcher.start()
self.time.return_value = 0
def tearDown(self):
self.time.stop()
self.addCleanup(patcher.stop)
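        # Using addCleanup here undoes the time.time patch even if setUp or a test
        # fails partway; the removed tearDown called self.time.stop() (a silent
        # no-op on the mock) instead of self.patcher.stop(), so the patch was never
        # actually undone.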
def test_fee_converts_to_lbc(self):
fee_dict = {

View file