Merge pull request #36 from lbryio/add-tests-to-travis

Add tests to travis
Jack Robison 2016-06-07 04:51:09 -04:00
commit b1566e904b
28 changed files with 1247 additions and 533 deletions

.gitignore vendored (2 changes)
View file

@ -23,3 +23,5 @@ lbrynet.egg-info/PKG-INFO
 *.pem
 *.decTest
+.coverage

.pylintrc (new file, 379 lines)
View file

@ -0,0 +1,379 @@
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Use multiple processes to speed up Pylint.
jobs=1
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loaded into the active Python interpreter and may
# run arbitrary code.
extension-pkg-whitelist=
# Allow optimization of some AST trees. This will activate a peephole AST
# optimizer, which will apply various small optimizations. For instance, it can
# be used to obtain the result of joining multiple strings with the addition
# operator. Joining a lot of strings can lead to a maximum recursion error in
# Pylint and this flag can prevent that. It has one side effect, the resulting
# AST will be different than the one from reality.
optimize-ast=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_$|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[BASIC]
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,input
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for function names
function-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for variable names
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for attribute names
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for argument names
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for method names
method-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
[ELIF]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=100
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=twisted.internet.reactor,leveldb
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set). This supports can work
# with qualified names.
ignored-classes=twisted.internet,RequestMessage
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
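
The evaluation expression in the [REPORTS] section above is how pylint turns message counts into the score it prints at the end of a run. A quick worked example with hypothetical counts (not taken from an actual run of this configuration):

# Hypothetical message counts plugged into the evaluation formula above
error, warning, refactor, convention, statement = 2, 4, 1, 3, 500
score = 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
print(score)  # 10.0 - ((10 + 4 + 1 + 3) / 500.0) * 10 = 9.64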

View file

@ -13,7 +13,8 @@ before_install:
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install --upgrade pip virtualenv; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo pip install --upgrade pip virtualenv; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then virtualenv $HOME/venv; source $HOME/venv/bin/activate; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then virtualenv $HOME/venv; source $HOME/venv/bin/activate; fi
install: true install:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./packaging/travis/install_dependencies_and_run_tests.sh; fi
before_script: before_script:
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then openssl aes-256-cbc -k "$ENCRYPTION_SECRET" -in packaging/osx/certs/dist.cer.enc -d -a -out packaging/osx/certs/dist.cer; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then openssl aes-256-cbc -k "$ENCRYPTION_SECRET" -in packaging/osx/certs/dist.cer.enc -d -a -out packaging/osx/certs/dist.cer; fi
@ -25,8 +26,7 @@ script:
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew upgrade gmp; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew upgrade gmp; fi
# the default py2app (v0.9) has a bug that is fixed in the head of /metachris/py2app # the default py2app (v0.9) has a bug that is fixed in the head of /metachris/py2app
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then pip install git+https://github.com/metachris/py2app; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then pip install git+https://github.com/metachris/py2app; fi
# py2app fails to find jsonrpc unless json-rpc is installed. why? I don't know. - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then pip install jsonrpc; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then pip install json-rpc; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then cd packaging/osx/lbry-osx-app; ./setup_app.sh; cd $TRAVIS_BUILD_DIR; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then cd packaging/osx/lbry-osx-app; ./setup_app.sh; cd $TRAVIS_BUILD_DIR; fi
# fail the build if this is a build for a tag and we don't have the versions matching # fail the build if this is a build for a tag and we don't have the versions matching
- if [[ -n "${TRAVIS_TAG}" ]]; then if [[ "v`python setup.py -V`" = "${TRAVIS_TAG}" ]]; then true; else false; fi; fi - if [[ -n "${TRAVIS_TAG}" ]]; then if [[ "v`python setup.py -V`" = "${TRAVIS_TAG}" ]]; then true; else false; fi; fi

View file

@ -4,5 +4,5 @@ import logging
 logging.getLogger(__name__).addHandler(logging.NullHandler())
-version = (0, 2, 4)
+version = (0, 2, 5)
 __version__ = ".".join([str(x) for x in version])

View file

@ -36,8 +36,10 @@ UI_ADDRESS = "http://" + API_INTERFACE + ":" + str(API_PORT)
 PROTOCOL_PREFIX = "lbry"
 DEFAULT_WALLET = "lbryum"
+WALLET_TYPES = ["lbryum", "lbrycrd"]
 DEFAULT_TIMEOUT = 30
 DEFAULT_MAX_SEARCH_RESULTS = 25
 DEFAULT_MAX_KEY_FEE = 100.0
 DEFAULT_SEARCH_TIMEOUT = 3.0
 DEFAULT_CACHE_TIME = 3600
+DEFAULT_UI_BRANCH = "master"

View file

@ -295,6 +295,11 @@ class LBRYWallet(object):
         d.addCallback(self._get_stream_info_from_value, name)
         return d
 
+    def get_txid_for_name(self, name):
+        d = self._get_value_for_name(name)
+        d.addCallback(lambda r: None if 'txid' not in r else r['txid'])
+        return d
+
     def get_stream_info_from_txid(self, name, txid):
         d = self.get_claims_from_tx(txid)
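
The new get_txid_for_name helper resolves a claim name to the transaction id stored in its claim value, returning None when no 'txid' field is present. A minimal usage sketch in the deferred style used throughout the codebase; the wallet instance and claim name are placeholders:

def _print_txid(txid):
    # txid is None when the resolved claim value carries no 'txid' field
    print("claim txid: %s" % txid)

d = wallet.get_txid_for_name("some-claim-name")  # assumes a started LBRYWallet instance
d.addCallback(_print_txid)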

View file

@ -52,7 +52,7 @@ class DownloadManager(object):
         def check_stop(result, manager):
             if isinstance(result, failure.Failure):
-                log.error("Failed to stop the %s: %s", manager. result.getErrorMessage())
+                log.error("Failed to stop the %s: %s", manager, result.getErrorMessage())
                 return False
             return True

View file

@ -1,3 +1,6 @@
+# pylint: skip-file
+# This file is not maintained, but might be used in the future
+#
 import logging
 import sys
 from lbrynet.lbrylive.LiveStreamCreator import StdOutLiveStreamCreator

View file

@ -1,7 +1,10 @@
+# pylint: skip-file
+# This file is not maintained, but might be used in the future
+#
 import logging
 import sys
-from lbrynet.lbrynet_console.plugins.LBRYLive.LBRYLiveStreamDownloader import LBRYLiveStreamDownloader
+from lbrynet.lbrylive.client.LiveStreamDownloader import LBRYLiveStreamDownloader
 from lbrynet.core.BlobManager import TempBlobManager
 from lbrynet.core.Session import LBRYSession
 from lbrynet.core.client.StandaloneBlobDownloader import StandaloneBlobDownloader

View file

@ -120,7 +120,7 @@ class CommandHandlerFactory(object):
         return self.control_handler_class.prompt_description
 
     def get_handler(self, console):
-        return self.control_handler_class(console, *self.args)
+        return self.control_handler_class(console, *self.args) # pylint: disable=not-callable
 
 
 class CommandHandler(object):

View file

@ -101,7 +101,7 @@ class ValuableBlobHashQueryHandler(ValuableQueryHandler):
             for blob_hash, count in valuable_hashes:
                 hashes_and_scores.append((blob_hash, 1.0 * count / 10.0))
             if len(hashes_and_scores) != 0:
-                log.info("Responding to a valuable blob hashes request with %s blob hashes: %s",
+                log.info("Responding to a valuable blob hashes request with %s blob hashes",
                          str(len(hashes_and_scores)))
                 expected_payment = 1.0 * len(hashes_and_scores) * self.valuable_blob_hash_payment_rate / 1000.0
                 self.wallet.add_expected_payment(self.peer, expected_payment)
@ -193,7 +193,7 @@ class ValuableBlobLengthQueryHandler(ValuableQueryHandler):
             if success is True:
                 lengths.append(response_pair)
             if len(lengths) > 0:
-                log.info("Responding with %s blob lengths: %s", str(len(lengths)))
+                log.info("Responding with %s blob lengths", str(len(lengths)))
                 expected_payment = 1.0 * len(lengths) * self.blob_length_payment_rate / 1000.0
                 self.wallet.add_expected_payment(self.peer, expected_payment)
                 self.peer.update_stats('uploaded_valuable_blob_infos', len(lengths))
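
Both hunks in this file, like the DownloadManager fix above, correct log calls whose %s placeholders did not match the arguments supplied. A standalone sketch of the failing and corrected patterns (not code from this repository):

import logging
log = logging.getLogger(__name__)

count = 3
# Mismatched: two placeholders, one argument; the record fails to format and the
# message is lost (logging reports the formatting error instead).
# log.info("Responding with %s blob lengths: %s", str(count))
# Matched: one placeholder, one argument.
log.info("Responding with %s blob lengths", str(count))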

View file

@ -1,108 +0,0 @@
import rumps
import xmlrpclib
import os
import webbrowser
import subprocess
import argparse
class DaemonStatusBarApp(rumps.App):
def __init__(self):
icon_path = 'app.icns'
if os.path.isfile(icon_path):
rumps.App.__init__(self, name="LBRY", icon=icon_path, quit_button=None,
menu=["Open", "Preferences", "View balance", "Quit"])
else:
rumps.App.__init__(self, name="LBRY", title="LBRY", quit_button=None,
menu=["Open", "Preferences", "View balance", "Quit"])
@rumps.timer(1)
def alert_daemon_start(self):
daemon = xmlrpclib.ServerProxy("http://localhost:7080/")
try:
start_msg = daemon.is_running()
if isinstance(start_msg, str):
rumps.notification(title='LBRY', subtitle='', message=str(start_msg), sound=True)
update_info = daemon.check_for_new_version()
update_msg = ""
for p in update_info:
if not p[0]:
update_msg += p[1] + "\n"
if update_msg:
update_msg += "\n Try running the installer again to fix this"
rumps.notification(title='LBRY', subtitle='', message=update_msg, sound=True)
except:
pass
@rumps.clicked('Open')
def get_ui(self):
daemon = xmlrpclib.ServerProxy("http://localhost:7080/")
try:
daemon.is_running()
webbrowser.get('safari').open("lbry://lbry")
except:
try:
rumps.notification(title='LBRY', subtitle='', message="Couldn't connect to lbrynet daemon", sound=True)
except:
rumps.alert(title='LBRY', message="Couldn't connect to lbrynet daemon")
@rumps.clicked("Preferences")
def prefs(self):
daemon = xmlrpclib.ServerProxy("http://localhost:7080/")
try:
daemon.is_running()
webbrowser.get('safari').open("lbry://settings")
except:
rumps.notification(title='LBRY', subtitle='', message="Couldn't connect to lbrynet daemon", sound=True)
@rumps.clicked("View balance")
def disp_balance(self):
daemon = xmlrpclib.ServerProxy("http://localhost:7080/")
try:
balance = daemon.get_balance()
r = round(float(balance), 2)
try:
rumps.notification(title='LBRY', subtitle='', message=str("Your balance is %.2f LBC" % r), sound=False)
except:
rumps.alert(title='LBRY', message=str("Your balance is %.2f LBC" % r))
except:
try:
rumps.notification(title='LBRY', subtitle='', message="Couldn't connect to lbrynet daemon", sound=True)
except:
rumps.alert(title='LBRY', message="Couldn't connect to lbrynet daemon")
@rumps.clicked('Quit')
def clean_quit(self):
daemon = xmlrpclib.ServerProxy("http://localhost:7080/")
try:
daemon.stop()
except:
pass
rumps.quit_application()
def main():
parser = argparse.ArgumentParser(description="Launch lbrynet status bar application")
parser.add_argument("--startdaemon",
help="true or false, default true",
type=str,
default="true")
args = parser.parse_args()
if str(args.startdaemon).lower() == "true":
daemon = xmlrpclib.ServerProxy('http://localhost:7080')
try:
daemon.is_running()
except:
subprocess.Popen("screen -dmS lbrynet bash -c "
"'PYTHONPATH=$PYTHONPATH:`cat /Users/${USER}/Library/Application\ Support/lbrynet/.python_path`; "
"PATH=$PATH:`cat /Users/${USER}/Library/Application\ Support/lbrynet/.lbry_bin_path`; "
"lbrynet-daemon --update=False'", shell=True)
status_app = DaemonStatusBarApp()
status_app.run()
if __name__ == '__main__':
main()

View file

@ -1,60 +0,0 @@
import os
import json
import webbrowser
import subprocess
import sys
from time import sleep
from jsonrpc.proxy import JSONRPCProxy
API_CONNECTION_STRING = "http://localhost:5279/lbryapi"
UI_ADDRESS = "http://localhost:5279"
class LBRYURIHandler(object):
def __init__(self):
self.started_daemon = False
self.daemon = JSONRPCProxy.from_url(API_CONNECTION_STRING)
def handle_osx(self, lbry_name):
try:
status = self.daemon.is_running()
except:
os.system("open /Applications/LBRY.app")
sleep(3)
if lbry_name == "lbry" or lbry_name == "":
webbrowser.open(UI_ADDRESS)
else:
webbrowser.open(UI_ADDRESS + "/?watch=" + lbry_name)
def handle_linux(self, lbry_name):
try:
status = self.daemon.is_running()
except:
cmd = r'DIR = "$( cd "$(dirname "${BASH_SOURCE[0]}" )" && pwd )"' \
r'if [-z "$(pgrep lbrynet-daemon)"]; then' \
r'echo "running lbrynet-daemon..."' \
r'$DIR / lbrynet - daemon &' \
r'sleep 3 # let the daemon load before connecting' \
r'fi'
subprocess.Popen(cmd, shell=True)
if lbry_name == "lbry" or lbry_name == "":
webbrowser.open(UI_ADDRESS)
else:
webbrowser.open(UI_ADDRESS + "/?watch=" + lbry_name)
def main(args):
if len(args) != 1:
args = ['lbry://lbry']
name = args[0][7:]
if sys.platform == "darwin":
LBRYURIHandler().handle_osx(lbry_name=name)
else:
LBRYURIHandler().handle_linux(lbry_name=name)
if __name__ == "__main__":
main(sys.argv[1:])

View file

@ -1,5 +1,6 @@
import locale import locale
import os import os
import subprocess
import sys import sys
import simplejson as json import simplejson as json
import binascii import binascii
@ -31,13 +32,14 @@ from lbrynet.core.Error import UnknownNameError, InsufficientFundsError
from lbrynet.lbryfile.StreamDescriptor import LBRYFileStreamType from lbrynet.lbryfile.StreamDescriptor import LBRYFileStreamType
from lbrynet.lbryfile.client.LBRYFileDownloader import LBRYFileSaverFactory, LBRYFileOpenerFactory from lbrynet.lbryfile.client.LBRYFileDownloader import LBRYFileSaverFactory, LBRYFileOpenerFactory
from lbrynet.lbryfile.client.LBRYFileOptions import add_lbry_file_to_sd_identifier from lbrynet.lbryfile.client.LBRYFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.lbrynet_daemon.LBRYUIManager import LBRYUIManager
from lbrynet.lbrynet_daemon.LBRYDownloader import GetStream from lbrynet.lbrynet_daemon.LBRYDownloader import GetStream
from lbrynet.lbrynet_daemon.LBRYPublisher import Publisher from lbrynet.lbrynet_daemon.LBRYPublisher import Publisher
from lbrynet.core.utils import generate_id from lbrynet.core.utils import generate_id
from lbrynet.lbrynet_console.LBRYSettings import LBRYSettings from lbrynet.lbrynet_console.LBRYSettings import LBRYSettings
from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, DEFAULT_MAX_SEARCH_RESULTS, KNOWN_DHT_NODES, DEFAULT_MAX_KEY_FEE, \ from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, DEFAULT_MAX_SEARCH_RESULTS, KNOWN_DHT_NODES, DEFAULT_MAX_KEY_FEE, \
DEFAULT_WALLET, DEFAULT_SEARCH_TIMEOUT, DEFAULT_CACHE_TIME DEFAULT_WALLET, DEFAULT_SEARCH_TIMEOUT, DEFAULT_CACHE_TIME, DEFAULT_UI_BRANCH
from lbrynet.conf import API_CONNECTION_STRING, API_PORT, API_ADDRESS, DEFAULT_TIMEOUT, UI_ADDRESS from lbrynet.conf import DEFAULT_TIMEOUT, WALLET_TYPES
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob
from lbrynet.core.Session import LBRYSession from lbrynet.core.Session import LBRYSession
from lbrynet.core.PTCWallet import PTCWallet from lbrynet.core.PTCWallet import PTCWallet
@ -109,7 +111,7 @@ CONNECTION_PROBLEM_CODES = [
ALLOWED_DURING_STARTUP = ['is_running', 'is_first_run', ALLOWED_DURING_STARTUP = ['is_running', 'is_first_run',
'get_time_behind_blockchain', 'stop', 'get_time_behind_blockchain', 'stop',
'daemon_status', 'get_start_notice', 'daemon_status', 'get_start_notice',
'version', 'check_for_new_version'] 'version']
BAD_REQUEST = 400 BAD_REQUEST = 400
NOT_FOUND = 404 NOT_FOUND = 404
@ -129,7 +131,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
isLeaf = True isLeaf = True
def __init__(self, ui_version_info, wallet_type=DEFAULT_WALLET): def __init__(self, root, wallet_type=DEFAULT_WALLET):
jsonrpc.JSONRPC.__init__(self) jsonrpc.JSONRPC.__init__(self)
reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown) reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
@ -139,9 +141,10 @@ class LBRYDaemon(jsonrpc.JSONRPC):
self.connected_to_internet = True self.connected_to_internet = True
self.connection_problem = None self.connection_problem = None
self.query_handlers = {} self.query_handlers = {}
self.ui_version = ui_version_info.replace('\n', '')
self.git_lbrynet_version = None self.git_lbrynet_version = None
self.git_lbryum_version = None self.git_lbryum_version = None
self.ui_version = None
self.ip = None
self.wallet_type = wallet_type self.wallet_type = wallet_type
self.first_run = None self.first_run = None
self.log_file = LOG_FILENAME self.log_file = LOG_FILENAME
@ -151,20 +154,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
self.waiting_on = {} self.waiting_on = {}
self.streams = {} self.streams = {}
self.known_dht_nodes = KNOWN_DHT_NODES self.known_dht_nodes = KNOWN_DHT_NODES
self.platform_info = { self.first_run_after_update = False
"processor": platform.processor(),
"python_version: ": platform.python_version(),
"platform": platform.platform(),
"os_release": platform.release(),
"os_system": platform.system(),
"lbrynet_version: ": lbrynet_version,
"lbryum_version: ": lbryum_version,
"ui_version": self.ui_version,
}
try:
self.platform_info['ip'] = json.load(urlopen('http://jsonip.com'))['ip']
except:
self.platform_info['ip'] = "Could not determine"
if os.name == "nt": if os.name == "nt":
from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
@ -197,7 +187,9 @@ class LBRYDaemon(jsonrpc.JSONRPC):
'use_upnp': True, 'use_upnp': True,
'start_lbrycrdd': True, 'start_lbrycrdd': True,
'requested_first_run_credits': False, 'requested_first_run_credits': False,
'cache_time': DEFAULT_CACHE_TIME 'cache_time': DEFAULT_CACHE_TIME,
'startup_scripts': [],
'last_version': {'lbrynet': lbrynet_version, 'lbryum': lbryum_version}
} }
if os.path.isfile(self.daemon_conf): if os.path.isfile(self.daemon_conf):
@ -234,6 +226,20 @@ class LBRYDaemon(jsonrpc.JSONRPC):
self.session_settings = settings_dict self.session_settings = settings_dict
if 'last_version' in missing_settings.keys():
self.session_settings['last_version'] = None
if self.session_settings['last_version'] != self.default_settings['last_version']:
self.session_settings['last_version'] = self.default_settings['last_version']
f = open(self.daemon_conf, "w")
f.write(json.dumps(self.session_settings))
f.close()
self.first_run_after_update = True
log.info("First run after update")
if lbrynet_version == '0.2.5':
self.session_settings['startup_scripts'].append({'script_name': 'migrateto025', 'run_once': True})
self.run_on_startup = self.session_settings['run_on_startup'] self.run_on_startup = self.session_settings['run_on_startup']
self.data_rate = self.session_settings['data_rate'] self.data_rate = self.session_settings['data_rate']
self.max_key_fee = self.session_settings['max_key_fee'] self.max_key_fee = self.session_settings['max_key_fee']
@ -244,7 +250,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
self.search_timeout = self.session_settings['search_timeout'] self.search_timeout = self.session_settings['search_timeout']
self.download_timeout = self.session_settings['download_timeout'] self.download_timeout = self.session_settings['download_timeout']
self.max_search_results = self.session_settings['max_search_results'] self.max_search_results = self.session_settings['max_search_results']
self.wallet_type = self.session_settings['wallet_type'] if self.session_settings['wallet_type'] == wallet_type else wallet_type self.wallet_type = self.session_settings['wallet_type'] if self.session_settings['wallet_type'] in WALLET_TYPES else wallet_type
self.delete_blobs_on_remove = self.session_settings['delete_blobs_on_remove'] self.delete_blobs_on_remove = self.session_settings['delete_blobs_on_remove']
self.peer_port = self.session_settings['peer_port'] self.peer_port = self.session_settings['peer_port']
self.dht_node_port = self.session_settings['dht_node_port'] self.dht_node_port = self.session_settings['dht_node_port']
@ -252,6 +258,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
self.start_lbrycrdd = self.session_settings['start_lbrycrdd'] self.start_lbrycrdd = self.session_settings['start_lbrycrdd']
self.requested_first_run_credits = self.session_settings['requested_first_run_credits'] self.requested_first_run_credits = self.session_settings['requested_first_run_credits']
self.cache_time = self.session_settings['cache_time'] self.cache_time = self.session_settings['cache_time']
self.startup_scripts = self.session_settings['startup_scripts']
if os.path.isfile(os.path.join(self.db_dir, "stream_info_cache.json")): if os.path.isfile(os.path.join(self.db_dir, "stream_info_cache.json")):
f = open(os.path.join(self.db_dir, "stream_info_cache.json"), "r") f = open(os.path.join(self.db_dir, "stream_info_cache.json"), "r")
@ -301,6 +308,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
self.sd_identifier = StreamDescriptorIdentifier() self.sd_identifier = StreamDescriptorIdentifier()
self.stream_info_manager = TempLBRYFileMetadataManager() self.stream_info_manager = TempLBRYFileMetadataManager()
self.settings = LBRYSettings(self.db_dir) self.settings = LBRYSettings(self.db_dir)
self.lbry_ui_manager = LBRYUIManager(root)
self.blob_request_payment_rate_manager = None self.blob_request_payment_rate_manager = None
self.lbry_file_metadata_manager = None self.lbry_file_metadata_manager = None
self.lbry_file_manager = None self.lbry_file_manager = None
@ -374,7 +382,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
log.error(failure) log.error(failure)
return jsonrpclib.Fault(self.FAILURE, "error") return jsonrpclib.Fault(self.FAILURE, "error")
def setup(self): def setup(self, branch=DEFAULT_UI_BRANCH, user_specified=False, branch_specified=False):
def _log_starting_vals(): def _log_starting_vals():
d = self._get_lbry_files() d = self._get_lbry_files()
d.addCallback(lambda r: json.dumps([d[1] for d in r])) d.addCallback(lambda r: json.dumps([d[1] for d in r]))
@ -394,12 +402,16 @@ class LBRYDaemon(jsonrpc.JSONRPC):
self.announced_startup = True self.announced_startup = True
self.startup_status = STARTUP_STAGES[5] self.startup_status = STARTUP_STAGES[5]
log.info("[" + str(datetime.now()) + "] Started lbrynet-daemon") log.info("[" + str(datetime.now()) + "] Started lbrynet-daemon")
if len(self.startup_scripts):
log.info("Scheduling scripts")
reactor.callLater(3, self._run_scripts)
# self.lbrynet_connection_checker.start(3600) # self.lbrynet_connection_checker.start(3600)
if self.first_run: if self.first_run:
d = self._upload_log(name_prefix="fr") d = self._upload_log(log_type="first_run")
else: else:
d = self._upload_log(exclude_previous=True, name_prefix="start") d = self._upload_log(exclude_previous=True, log_type="start")
if float(self.session.wallet.wallet_balance) == 0.0: if float(self.session.wallet.wallet_balance) == 0.0:
d.addCallback(lambda _: self._check_first_run()) d.addCallback(lambda _: self._check_first_run())
@ -415,6 +427,9 @@ class LBRYDaemon(jsonrpc.JSONRPC):
self.connection_problem_checker.start(1) self.connection_problem_checker.start(1)
d = defer.Deferred() d = defer.Deferred()
d.addCallback(lambda _: self.lbry_ui_manager.setup(branch=branch,
user_specified=user_specified,
branch_specified=branch_specified))
d.addCallback(lambda _: self._initial_setup()) d.addCallback(lambda _: self._initial_setup())
d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory)) d.addCallback(lambda _: threads.deferToThread(self._setup_data_directory))
d.addCallback(lambda _: self._check_db_migration()) d.addCallback(lambda _: self._check_db_migration())
@ -433,9 +448,29 @@ class LBRYDaemon(jsonrpc.JSONRPC):
return defer.succeed(None) return defer.succeed(None)
def _get_platform(self):
r = {
"processor": platform.processor(),
"python_version: ": platform.python_version(),
"platform": platform.platform(),
"os_release": platform.release(),
"os_system": platform.system(),
"lbrynet_version: ": lbrynet_version,
"lbryum_version: ": lbryum_version,
"ui_version": self.lbry_ui_manager.loaded_git_version,
}
if not self.ip:
try:
r['ip'] = json.load(urlopen('http://jsonip.com'))['ip']
self.ip = r['ip']
except:
r['ip'] = "Could not determine"
return r
def _initial_setup(self): def _initial_setup(self):
def _log_platform(): def _log_platform():
log.info("Platform: " + json.dumps(self.platform_info)) log.info("Platform: " + json.dumps(self._get_platform()))
return defer.succeed(None) return defer.succeed(None)
d = _log_platform() d = _log_platform()
@ -523,11 +558,14 @@ class LBRYDaemon(jsonrpc.JSONRPC):
return defer.succeed(True) return defer.succeed(True)
def _stop_server(self): def _stop_server(self):
try:
if self.lbry_server_port is not None: if self.lbry_server_port is not None:
self.lbry_server_port, p = None, self.lbry_server_port self.lbry_server_port, p = None, self.lbry_server_port
return defer.maybeDeferred(p.stopListening) return defer.maybeDeferred(p.stopListening)
else: else:
return defer.succeed(True) return defer.succeed(True)
except AttributeError:
return defer.succeed(True)
def _setup_server(self): def _setup_server(self):
def restore_running_status(running): def restore_running_status(running):
@ -579,12 +617,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
dl.addCallback(_set_query_handlers) dl.addCallback(_set_query_handlers)
return dl return dl
def _upload_log(self, name_prefix=None, exclude_previous=False, force=False): def _upload_log(self, log_type=None, exclude_previous=False, force=False):
if name_prefix:
name_prefix = name_prefix + "-" + platform.system()
else:
name_prefix = platform.system()
if self.upload_log or force: if self.upload_log or force:
LOG_URL = "https://lbry.io/log-upload" LOG_URL = "https://lbry.io/log-upload"
if exclude_previous: if exclude_previous:
@ -596,9 +629,13 @@ class LBRYDaemon(jsonrpc.JSONRPC):
f = open(self.log_file, "r") f = open(self.log_file, "r")
log_contents = f.read() log_contents = f.read()
f.close() f.close()
t = datetime.now() params = {
log_name = name_prefix + "-" + base58.b58encode(self.lbryid)[:20] + "-" + str(t.month) + "-" + str(t.day) + "-" + str(t.year) + "-" + str(t.hour) + "-" + str(t.minute) 'date': datetime.utcnow().strftime('%Y%m%d-%H%M%S'),
params = {'name': log_name, 'log': log_contents} 'hash': base58.b58encode(self.lbryid)[:20],
'sys': platform.system(),
'type': log_type,
'log': log_contents
}
requests.post(LOG_URL, params) requests.post(LOG_URL, params)
return defer.succeed(None) return defer.succeed(None)
@ -608,14 +645,21 @@ class LBRYDaemon(jsonrpc.JSONRPC):
def _shutdown(self): def _shutdown(self):
log.info("Closing lbrynet session") log.info("Closing lbrynet session")
log.info("Status at time of shutdown: " + self.startup_status[0]) log.info("Status at time of shutdown: " + self.startup_status[0])
if self.internet_connection_checker.running:
self.internet_connection_checker.stop()
if self.version_checker.running:
self.version_checker.stop()
if self.connection_problem_checker.running:
self.connection_problem_checker.stop()
d = self._upload_log(name_prefix="close", exclude_previous=False if self.first_run else True) d = self._upload_log(log_type="close", exclude_previous=False if self.first_run else True)
d.addCallback(lambda _: self._stop_server()) d.addCallback(lambda _: self._stop_server())
d.addErrback(lambda err: True)
d.addCallback(lambda _: self.lbry_file_manager.stop()) d.addCallback(lambda _: self.lbry_file_manager.stop())
d.addErrback(lambda err: log.info("Bad server shutdown: " + err.getTraceback())) d.addErrback(lambda err: True)
if self.session is not None: if self.session is not None:
d.addCallback(lambda _: self.session.shut_down()) d.addCallback(lambda _: self.session.shut_down())
d.addErrback(lambda err: log.info("Bad session shutdown: " + err.getTraceback())) d.addErrback(lambda err: True)
return d return d
def _update_settings(self, settings): def _update_settings(self, settings):
@ -794,7 +838,8 @@ class LBRYDaemon(jsonrpc.JSONRPC):
log.info("Using PTC wallet") log.info("Using PTC wallet")
d = defer.succeed(PTCWallet(self.db_dir)) d = defer.succeed(PTCWallet(self.db_dir))
else: else:
d = defer.fail() log.info("Requested unknown wallet '%s', using default lbryum" % self.wallet_type)
d = defer.succeed(LBRYumWallet(self.db_dir))
d.addCallback(lambda wallet: {"wallet": wallet}) d.addCallback(lambda wallet: {"wallet": wallet})
return d return d
@ -1017,13 +1062,21 @@ class LBRYDaemon(jsonrpc.JSONRPC):
f.close() f.close()
return defer.succeed(True) return defer.succeed(True)
def _resolve_name(self, name): def _resolve_name(self, name, force_refresh=False):
def _cache_stream_info(stream_info): def _cache_stream_info(stream_info):
def _add_txid(txid):
self.name_cache[name]['txid'] = txid
return defer.succeed(None)
self.name_cache[name] = {'claim_metadata': stream_info, 'timestamp': self._get_long_count_timestamp()} self.name_cache[name] = {'claim_metadata': stream_info, 'timestamp': self._get_long_count_timestamp()}
d = self._update_claim_cache() d = self.session.wallet.get_txid_for_name(name)
d.addCallback(_add_txid)
d.addCallback(lambda _: self._update_claim_cache())
d.addCallback(lambda _: self.name_cache[name]['claim_metadata']) d.addCallback(lambda _: self.name_cache[name]['claim_metadata'])
return d return d
if not force_refresh:
if name in self.name_cache.keys(): if name in self.name_cache.keys():
if (self._get_long_count_timestamp() - self.name_cache[name]['timestamp']) < self.cache_time: if (self._get_long_count_timestamp() - self.name_cache[name]['timestamp']) < self.cache_time:
log.info("[" + str(datetime.now()) + "] Returning cached stream info for lbry://" + name) log.info("[" + str(datetime.now()) + "] Returning cached stream info for lbry://" + name)
@ -1036,10 +1089,14 @@ class LBRYDaemon(jsonrpc.JSONRPC):
log.info("[" + str(datetime.now()) + "] Resolving stream info for lbry://" + name) log.info("[" + str(datetime.now()) + "] Resolving stream info for lbry://" + name)
d = self.session.wallet.get_stream_info_for_name(name) d = self.session.wallet.get_stream_info_for_name(name)
d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError)) d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError))
else:
log.info("[" + str(datetime.now()) + "] Resolving stream info for lbry://" + name)
d = self.session.wallet.get_stream_info_for_name(name)
d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError))
return d return d
def _delete_lbry_file(self, lbry_file): def _delete_lbry_file(self, lbry_file, delete_file=True):
d = self.lbry_file_manager.delete_lbry_file(lbry_file) d = self.lbry_file_manager.delete_lbry_file(lbry_file)
def finish_deletion(lbry_file): def finish_deletion(lbry_file):
@ -1052,6 +1109,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
d = self.lbry_file_manager.get_count_for_stream_hash(s_h) d = self.lbry_file_manager.get_count_for_stream_hash(s_h)
# TODO: could possibly be a timing issue here # TODO: could possibly be a timing issue here
d.addCallback(lambda c: self.stream_info_manager.delete_stream(s_h) if c == 0 else True) d.addCallback(lambda c: self.stream_info_manager.delete_stream(s_h) if c == 0 else True)
if delete_file:
d.addCallback(lambda _: os.remove(os.path.join(self.download_directory, lbry_file.file_name)) if d.addCallback(lambda _: os.remove(os.path.join(self.download_directory, lbry_file.file_name)) if
os.path.isfile(os.path.join(self.download_directory, lbry_file.file_name)) else defer.succeed(None)) os.path.isfile(os.path.join(self.download_directory, lbry_file.file_name)) else defer.succeed(None))
return d return d
@ -1160,7 +1218,10 @@ class LBRYDaemon(jsonrpc.JSONRPC):
if status[0] == DOWNLOAD_RUNNING_CODE: if status[0] == DOWNLOAD_RUNNING_CODE:
d = f.status() d = f.status()
d.addCallback(_get_file_status) d.addCallback(_get_file_status)
d.addCallback(lambda message: {'completed': f.completed, 'file_name': f.file_name, 'key': key, d.addCallback(lambda message: {'completed': f.completed, 'file_name': f.file_name,
'download_directory': f.download_directory,
'download_path': os.path.join(f.download_directory, f.file_name),
'key': key,
'points_paid': f.points_paid, 'stopped': f.stopped, 'points_paid': f.points_paid, 'stopped': f.stopped,
'stream_hash': f.stream_hash, 'stream_hash': f.stream_hash,
'stream_name': f.stream_name, 'stream_name': f.stream_name,
@ -1172,6 +1233,8 @@ class LBRYDaemon(jsonrpc.JSONRPC):
'message': message}) 'message': message})
else: else:
d = defer.succeed({'completed': f.completed, 'file_name': f.file_name, 'key': key, d = defer.succeed({'completed': f.completed, 'file_name': f.file_name, 'key': key,
'download_directory': f.download_directory,
'download_path': os.path.join(f.download_directory, f.file_name),
'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash, 'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash,
'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name, 'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name,
'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash, 'total_bytes': size, 'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash, 'total_bytes': size,
@ -1221,6 +1284,31 @@ class LBRYDaemon(jsonrpc.JSONRPC):
requests.post(URL, json.dumps({"text": msg})) requests.post(URL, json.dumps({"text": msg}))
return defer.succeed(None) return defer.succeed(None)
def _run_scripts(self):
if len([k for k in self.startup_scripts if 'run_once' in k.keys()]):
log.info("Removing one time startup scripts")
f = open(self.daemon_conf, "r")
initialsettings = json.loads(f.read())
f.close()
t = [s for s in self.startup_scripts if 'run_once' not in s.keys()]
initialsettings['startup_scripts'] = t
f = open(self.daemon_conf, "w")
f.write(json.dumps(initialsettings))
f.close()
for script in self.startup_scripts:
if script['script_name'] == 'migrateto025':
log.info("Running migrator to 0.2.5")
from lbrynet.lbrynet_daemon.daemon_scripts.migrateto025 import run as run_migrate
run_migrate(self)
if script['script_name'] == 'Autofetcher':
log.info("Starting autofetcher script")
from lbrynet.lbrynet_daemon.daemon_scripts.Autofetcher import run as run_autofetcher
run_autofetcher(self)
return defer.succeed(None)
def _render_response(self, result, code): def _render_response(self, result, code):
return defer.succeed({'result': result, 'code': code}) return defer.succeed({'result': result, 'code': code})
@ -1330,10 +1418,11 @@ class LBRYDaemon(jsonrpc.JSONRPC):
"remote_lbryum": most recent lbryum version available from github "remote_lbryum": most recent lbryum version available from github
""" """
platform_info = self._get_platform()
msg = { msg = {
'platform': self.platform_info['platform'], 'platform': platform_info['platform'],
'os_release': self.platform_info['os_release'], 'os_release': platform_info['os_release'],
'os_system': self.platform_info['os_system'], 'os_system': platform_info['os_system'],
'lbrynet_version': lbrynet_version, 'lbrynet_version': lbrynet_version,
'lbryum_version': lbryum_version, 'lbryum_version': lbryum_version,
'ui_version': self.ui_version, 'ui_version': self.ui_version,
@ -1711,14 +1800,19 @@ class LBRYDaemon(jsonrpc.JSONRPC):
confirmation message confirmation message
""" """
if 'delete_target_file' in p.keys():
delete_file = p['delete_target_file']
else:
delete_file = True
def _delete_file(f): def _delete_file(f):
file_name = f.file_name file_name = f.file_name
d = self._delete_lbry_file(f) d = self._delete_lbry_file(f, delete_file=delete_file)
d.addCallback(lambda _: "Deleted LBRY file" + file_name) d.addCallback(lambda _: "Deleted LBRY file" + file_name)
return d return d
if p.keys()[0] in ['name', 'sd_hash', 'file_name']: if 'name' in p.keys() or 'sd_hash' in p.keys() or 'file_name' in p.keys():
search_type = p.keys()[0] search_type = [k for k in p.keys() if k != 'delete_target_file'][0]
d = self._get_lbry_file(search_type, p[search_type], return_json=False) d = self._get_lbry_file(search_type, p[search_type], return_json=False)
d.addCallback(lambda l: _delete_file(l) if l else False) d.addCallback(lambda l: _delete_file(l) if l else False)
@ -2051,26 +2145,19 @@ class LBRYDaemon(jsonrpc.JSONRPC):
# #
# return d # return d
def jsonrpc_check_for_new_version(self): def jsonrpc_log(self, message):
""" """
Checks local version against versions in __init__.py and version.py in the lbrynet and lbryum repos Log message
Args: Args:
None message: message to be logged
Returns: Returns:
true/false, true meaning that there is a new version available True
""" """
def _check_version(): log.info(message)
if (lbrynet_version >= self.git_lbrynet_version) and (lbryum_version >= self.git_lbryum_version):
log.info("[" + str(datetime.now()) + "] Up to date")
return self._render_response(False, OK_CODE)
else:
log.info("[" + str(datetime.now()) + "] Updates available")
return self._render_response(True, OK_CODE) return self._render_response(True, OK_CODE)
return _check_version()
def jsonrpc_upload_log(self, p=None): def jsonrpc_upload_log(self, p=None):
""" """
Upload log Upload log
@ -2078,15 +2165,15 @@ class LBRYDaemon(jsonrpc.JSONRPC):
Args, optional: Args, optional:
'name_prefix': prefix to indicate what is requesting the log upload 'name_prefix': prefix to indicate what is requesting the log upload
'exclude_previous': true/false, whether or not to exclude previous sessions from upload, defaults on true 'exclude_previous': true/false, whether or not to exclude previous sessions from upload, defaults on true
Returns Returns:
True True
""" """
if p: if p:
if 'name_prefix' in p.keys(): if 'name_prefix' in p.keys():
prefix = p['name_prefix'] + '_api' log_type = p['name_prefix'] + '_api'
else: else:
prefix = None log_type = None
if 'exclude_previous' in p.keys(): if 'exclude_previous' in p.keys():
exclude_previous = p['exclude_previous'] exclude_previous = p['exclude_previous']
@ -2101,11 +2188,49 @@ class LBRYDaemon(jsonrpc.JSONRPC):
else: else:
force = False force = False
else: else:
prefix = "api" log_type = "api"
exclude_previous = True exclude_previous = True
d = self._upload_log(name_prefix=prefix, exclude_previous=exclude_previous, force=force) d = self._upload_log(log_type=log_type, exclude_previous=exclude_previous, force=force)
if 'message' in p.keys(): if 'message' in p.keys():
d.addCallback(lambda _: self._log_to_slack(p['message'])) d.addCallback(lambda _: self._log_to_slack(p['message']))
d.addCallback(lambda _: self._render_response(True, OK_CODE)) d.addCallback(lambda _: self._render_response(True, OK_CODE))
return d return d
def jsonrpc_configure_ui(self, p):
"""
Configure the UI being hosted
Args, optional:
'branch': a branch name on lbryio/lbry-web-ui
'path': path to a ui folder
"""
if 'path' in p.keys():
d = self.lbry_ui_manager.setup(user_specified=p['path'])
elif 'branch' in p.keys():
d = self.lbry_ui_manager.setup(branch=p['branch'])
else:
d = self.lbry_ui_manager.setup()
d.addCallback(lambda r: self._render_response(r, OK_CODE))
return d
def jsonrpc_reveal(self, p):
"""
Open a folder in finder/file explorer
Args:
'path': path to be selected in finder
Returns:
True, opens finder
"""
path = p['path']
if sys.platform == "darwin":
d = threads.deferToThread(subprocess.Popen, ['open', '-R', path])
else:
d = threads.deferToThread(subprocess.Popen, ['xdg-open', '-R', path])
d.addCallback(lambda _: self._render_response(True, OK_CODE))
return d
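
The new jsonrpc_configure_ui and jsonrpc_reveal endpoints take a single dict of parameters, like the other daemon methods. A rough usage sketch over the JSON-RPC proxy used elsewhere in the codebase; the branch name and path are placeholders, and the exact proxy call style is an assumption:

from jsonrpc.proxy import JSONRPCProxy

api = JSONRPCProxy.from_url("http://localhost:5279/lbryapi")  # API_CONNECTION_STRING
# Point the hosted UI at a different lbry-web-ui branch (placeholder branch name).
api.configure_ui({'branch': 'development'})
# Reveal a downloaded file in Finder / the file manager (placeholder path).
api.reveal({'path': '/path/to/some/downloaded/file'})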

View file

@ -13,7 +13,8 @@ from twisted.internet import reactor, defer
 from jsonrpc.proxy import JSONRPCProxy
 from lbrynet.lbrynet_daemon.LBRYDaemonServer import LBRYDaemonServer
-from lbrynet.conf import API_CONNECTION_STRING, API_INTERFACE, API_ADDRESS, API_PORT, DEFAULT_WALLET, UI_ADDRESS
+from lbrynet.conf import API_CONNECTION_STRING, API_INTERFACE, API_ADDRESS, API_PORT, \
+    DEFAULT_WALLET, UI_ADDRESS, DEFAULT_UI_BRANCH
 
 if sys.platform != "darwin":
@ -68,12 +69,11 @@ def start():
                         help="path to custom UI folder",
                         default=None)
     parser.add_argument("--branch",
-                        help="Branch of lbry-web-ui repo to use, defaults on master",
-                        default="master")
+                        help="Branch of lbry-web-ui repo to use, defaults on master")
     parser.add_argument('--no-launch', dest='launchui', action="store_false")
     parser.add_argument('--log-to-console', dest='logtoconsole', action="store_true")
     parser.add_argument('--quiet', dest='quiet', action="store_true")
-    parser.set_defaults(launchui=True, logtoconsole=False, quiet=False)
+    parser.set_defaults(branch=False, launchui=True, logtoconsole=False, quiet=False)
     args = parser.parse_args()
 
     if args.logtoconsole:
@ -104,7 +104,10 @@ def start():
     if test_internet_connection():
         lbry = LBRYDaemonServer()
-        d = lbry.start(branch=args.branch, user_specified=args.ui, wallet=args.wallet)
+        d = lbry.start(branch=args.branch if args.branch else DEFAULT_UI_BRANCH,
+                       user_specified=args.ui,
+                       wallet=args.wallet,
+                       branch_specified=True if args.branch else False)
 
         if args.launchui:
             d.addCallback(lambda _: webbrowser.open(UI_ADDRESS))
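
With this change --branch no longer has a hard-coded default; the daemon is told whether the user explicitly requested a branch and falls back to DEFAULT_UI_BRANCH otherwise. A small standalone sketch of the resulting behaviour (argparse only, outside the daemon):

import argparse

DEFAULT_UI_BRANCH = "master"

parser = argparse.ArgumentParser()
parser.add_argument("--branch", help="Branch of lbry-web-ui repo to use, defaults on master")
parser.set_defaults(branch=False)

args = parser.parse_args([])  # no --branch given
branch = args.branch if args.branch else DEFAULT_UI_BRANCH
branch_specified = True if args.branch else False
print("%s %s" % (branch, branch_specified))  # master False

args = parser.parse_args(["--branch", "development"])
branch = args.branch if args.branch else DEFAULT_UI_BRANCH
branch_specified = True if args.branch else False
print("%s %s" % (branch, branch_specified))  # development True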

View file

@ -18,7 +18,7 @@ from txjsonrpc.web import jsonrpc
from zope.interface import implements from zope.interface import implements
from lbrynet.lbrynet_daemon.LBRYDaemon import LBRYDaemon from lbrynet.lbrynet_daemon.LBRYDaemon import LBRYDaemon
from lbrynet.conf import API_CONNECTION_STRING, API_ADDRESS, DEFAULT_WALLET, UI_ADDRESS from lbrynet.conf import API_CONNECTION_STRING, API_ADDRESS, DEFAULT_WALLET, UI_ADDRESS, DEFAULT_UI_BRANCH
if sys.platform != "darwin": if sys.platform != "darwin":
@ -149,22 +149,23 @@ class HostedLBRYFile(resource.Resource):
self._producer = None self._producer = None
resource.Resource.__init__(self) resource.Resource.__init__(self)
def makeProducer(self, request, stream): # todo: fix LBRYFileStreamer and use it instead of static.File
def _save_producer(producer): # def makeProducer(self, request, stream):
self._producer = producer # def _save_producer(producer):
return defer.succeed(None) # self._producer = producer
# return defer.succeed(None)
range_header = request.getAllHeaders()['range'].replace('bytes=', '').split('-') #
start, stop = int(range_header[0]), range_header[1] # range_header = request.getAllHeaders()['range'].replace('bytes=', '').split('-')
log.info("[" + str(datetime.now()) + "] GET range %s-%s" % (start, stop)) # start, stop = int(range_header[0]), range_header[1]
path = os.path.join(self._api.download_directory, stream.file_name) # log.info("[" + str(datetime.now()) + "] GET range %s-%s" % (start, stop))
# path = os.path.join(self._api.download_directory, stream.file_name)
d = stream.get_total_bytes() #
d.addCallback(lambda size: _save_producer(LBRYFileStreamer(request, path, start, stop, size))) # d = stream.get_total_bytes()
d.addCallback(lambda _: request.registerProducer(self._producer, streaming=True)) # d.addCallback(lambda size: _save_producer(LBRYFileStreamer(request, path, start, stop, size)))
# request.notifyFinish().addCallback(lambda _: self._producer.stopProducing()) # d.addCallback(lambda _: request.registerProducer(self._producer, streaming=True))
request.notifyFinish().addErrback(self._responseFailed, d) # # request.notifyFinish().addCallback(lambda _: self._producer.stopProducing())
return d # request.notifyFinish().addErrback(self._responseFailed, d)
# return d
def render_GET(self, request): def render_GET(self, request):
if 'name' in request.args.keys(): if 'name' in request.args.keys():
@ -182,172 +183,22 @@ class HostedLBRYFile(resource.Resource):
request.finish() request.finish()
return server.NOT_DONE_YET return server.NOT_DONE_YET
def _responseFailed(self, err, call): # def _responseFailed(self, err, call):
call.addErrback(lambda err: err.trap(error.ConnectionDone)) # call.addErrback(lambda err: err.trap(error.ConnectionDone))
call.addErrback(lambda err: err.trap(defer.CancelledError)) # call.addErrback(lambda err: err.trap(defer.CancelledError))
call.addErrback(lambda err: log.info("Error: " + str(err))) # call.addErrback(lambda err: log.info("Error: " + str(err)))
call.cancel() # call.cancel()
class MyLBRYFiles(resource.Resource):
isLeaf = False
def __init__(self):
resource.Resource.__init__(self)
self.files_table = None
def delayed_render(self, request, result):
request.write(result.encode('utf-8'))
request.finish()
def render_GET(self, request):
self.files_table = None
api = jsonrpc.Proxy(API_CONNECTION_STRING)
d = api.callRemote("get_lbry_files", {})
d.addCallback(self._get_table)
d.addCallback(lambda results: self.delayed_render(request, results))
return server.NOT_DONE_YET
def _get_table(self, files):
if not self.files_table:
self.files_table = r'<html><head><title>My LBRY files</title></head><body><table border="1">'
self.files_table += r'<tr>'
self.files_table += r'<td>Stream name</td>'
self.files_table += r'<td>Completed</td>'
self.files_table += r'<td>Toggle</td>'
self.files_table += r'<td>Remove</td>'
self.files_table += r'</tr>'
return self._get_table(files)
if not len(files):
self.files_table += r'</table></body></html>'
return self.files_table
else:
f = files.pop()
self.files_table += r'<tr>'
self.files_table += r'<td>%s</td>' % (f['stream_name'])
self.files_table += r'<td>%s</td>' % (f['completed'])
self.files_table += r'<td>Start</td>' if f['stopped'] else r'<td>Stop</td>'
self.files_table += r'<td>Delete</td>'
self.files_table += r'</tr>'
return self._get_table(files)
class LBRYDaemonServer(object):
def _setup_server(self, wallet):
self.root = LBRYindex(os.path.join(os.path.join(data_dir, "lbry-ui"), "active"))
self._api = LBRYDaemon(self.root, wallet_type=wallet)

def __init__(self):
self.data_dir = user_data_dir("LBRY")
if not os.path.isdir(self.data_dir):
os.mkdir(self.data_dir)
self.version_dir = os.path.join(self.data_dir, "ui_version_history")
if not os.path.isdir(self.version_dir):
os.mkdir(self.version_dir)
self.config = os.path.join(self.version_dir, "active.json")
self.ui_dir = os.path.join(self.data_dir, "lbry-web-ui")
self.git_version = None
self._api = None
self.root = None
if not os.path.isfile(os.path.join(self.config)):
self.loaded_git_version = None
else:
try:
f = open(self.config, "r")
loaded_ui = json.loads(f.read())
f.close()
self.loaded_git_version = loaded_ui['commit']
self.loaded_branch = loaded_ui['branch']
version_log.info("[" + str(datetime.now()) + "] Last used " + self.loaded_branch + " commit " + str(self.loaded_git_version).replace("\n", ""))
except:
self.loaded_git_version = None
self.loaded_branch = None
def setup(self, branch="master", user_specified=None):
self.branch = branch
if user_specified:
if os.path.isdir(user_specified):
log.info("Using user specified UI directory: " + str(user_specified))
self.branch = "user-specified"
self.loaded_git_version = "user-specified"
self.ui_dir = user_specified
return defer.succeed("user-specified")
else:
log.info("User specified UI directory doesn't exist, using " + branch)
else:
log.info("Using UI branch: " + branch)
self._git_url = "https://api.github.com/repos/lbryio/lbry-web-ui/git/refs/heads/%s" % branch
self._dist_url = "https://raw.githubusercontent.com/lbryio/lbry-web-ui/%s/dist.zip" % branch
d = self._up_to_date()
d.addCallback(lambda r: self._download_ui() if not r else self.branch)
return d
def _up_to_date(self):
def _get_git_info():
response = urlopen(self._git_url)
data = json.loads(response.read())
return defer.succeed(data['object']['sha'])
def _set_git(version):
self.git_version = version
version_log.info("[" + str(datetime.now()) + "] UI branch " + self.branch + " has a most recent commit of: " + str(self.git_version).replace("\n", ""))
if self.git_version == self.loaded_git_version and os.path.isdir(self.ui_dir):
version_log.info("[" + str(datetime.now()) + "] local copy of " + self.branch + " is up to date")
return defer.succeed(True)
else:
if self.git_version == self.loaded_git_version:
version_log.info("[" + str(datetime.now()) + "] Can't find ui files, downloading them again")
else:
version_log.info("[" + str(datetime.now()) + "] local copy of " + self.branch + " branch is out of date, updating")
f = open(self.config, "w")
f.write(json.dumps({'commit': self.git_version,
'time': str(datetime.now()),
'branch': self.branch}))
f.close()
return defer.succeed(False)
d = _get_git_info()
d.addCallback(_set_git)
return d
def _download_ui(self):
def _delete_ui_dir():
if os.path.isdir(self.ui_dir):
if self.loaded_git_version:
version_log.info("[" + str(datetime.now()) + "] Removed ui files for commit " + str(self.loaded_git_version).replace("\n", ""))
log.info("Removing out of date ui files")
shutil.rmtree(self.ui_dir)
return defer.succeed(None)
def _dl_ui():
url = urlopen(self._dist_url)
z = ZipFile(StringIO(url.read()))
names = [i for i in z.namelist() if '.DS_exStore' not in i and '__MACOSX' not in i]
z.extractall(self.ui_dir, members=names)
version_log.info("[" + str(datetime.now()) + "] Updated branch " + self.branch + ": " + str(self.loaded_git_version).replace("\n", "") + " --> " + self.git_version.replace("\n", ""))
log.info("Downloaded files for UI commit " + str(self.git_version).replace("\n", ""))
self.loaded_git_version = self.git_version
return self.branch
d = _delete_ui_dir()
d.addCallback(lambda _: _dl_ui())
return d
def _setup_server(self, ui_ver, wallet):
self._api = LBRYDaemon(ui_ver, wallet_type=wallet)
self.root = LBRYindex(self.ui_dir)
self.root.putChild("css", static.File(os.path.join(self.ui_dir, "css")))
self.root.putChild("font", static.File(os.path.join(self.ui_dir, "font")))
self.root.putChild("img", static.File(os.path.join(self.ui_dir, "img")))
self.root.putChild("js", static.File(os.path.join(self.ui_dir, "js")))
self.root.putChild("view", HostedLBRYFile(self._api)) self.root.putChild("view", HostedLBRYFile(self._api))
self.root.putChild("files", MyLBRYFiles())
self.root.putChild(API_ADDRESS, self._api) self.root.putChild(API_ADDRESS, self._api)
return defer.succeed(True) return defer.succeed(True)
def start(self, branch="master", user_specified=False, wallet=DEFAULT_WALLET): def start(self, branch=DEFAULT_UI_BRANCH, user_specified=False, branch_specified=False, wallet=DEFAULT_WALLET):
d = self.setup(branch=branch, user_specified=user_specified) d = self._setup_server(self._setup_server(wallet))
d.addCallback(lambda v: self._setup_server(v, wallet)) d.addCallback(lambda _: self._api.setup(branch, user_specified, branch_specified))
d.addCallback(lambda _: self._api.setup())
return d return d
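For orientation, the slimmed-down server class above is presumably constructed and started from a separate launcher that is not part of this hunk. A minimal sketch of how that wiring might look, assuming a Twisted launcher along these lines (the port number and the launcher shape are illustrative assumptions, not taken from this diff):

# Illustrative only: how LBRYDaemonServer.start() might be driven from a launcher.
# LBRYDaemonServer, DEFAULT_UI_BRANCH and DEFAULT_WALLET come from this module;
# the 5279 port and the launcher itself are assumptions.
from twisted.internet import reactor
from twisted.web import server

lbrynet_server = LBRYDaemonServer()
d = lbrynet_server.start(branch=DEFAULT_UI_BRANCH, wallet=DEFAULT_WALLET)
d.addCallback(lambda _: reactor.listenTCP(5279, server.Site(lbrynet_server.root)))
reactor.run()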
@ -0,0 +1,224 @@
import os
import logging
import shutil
import sys
import json
from urllib2 import urlopen
from StringIO import StringIO
from twisted.web import static
from twisted.internet import defer
from lbrynet.conf import DEFAULT_UI_BRANCH
from lbrynet import __version__ as lbrynet_version
from lbryum.version import ELECTRUM_VERSION as lbryum_version
from zipfile import ZipFile
from appdirs import user_data_dir
if sys.platform != "darwin":
data_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
else:
data_dir = user_data_dir("LBRY")
if not os.path.isdir(data_dir):
os.mkdir(data_dir)
version_dir = os.path.join(data_dir, "ui_version_history")
if not os.path.isdir(version_dir):
os.mkdir(version_dir)
log = logging.getLogger(__name__)
log.addHandler(logging.FileHandler(os.path.join(data_dir, 'lbrynet-daemon.log')))
log.setLevel(logging.INFO)
class LBRYUIManager(object):
def __init__(self, root):
self.data_dir = user_data_dir("LBRY")
self.ui_root = os.path.join(self.data_dir, "lbry-ui")
self.active_dir = os.path.join(self.ui_root, "active")
self.update_dir = os.path.join(self.ui_root, "update")
if not os.path.isdir(self.data_dir):
os.mkdir(self.data_dir)
if not os.path.isdir(self.ui_root):
os.mkdir(self.ui_root)
if not os.path.isdir(self.active_dir):
os.mkdir(self.active_dir)
if not os.path.isdir(self.update_dir):
os.mkdir(self.update_dir)
self.config = os.path.join(self.ui_root, "active.json")
self.update_requires = os.path.join(self.update_dir, "requirements.txt")
self.requirements = {}
self.ui_dir = self.active_dir
self.git_version = None
self.root = root
if not os.path.isfile(os.path.join(self.config)):
self.loaded_git_version = None
self.loaded_branch = None
self.loaded_requirements = None
else:
try:
f = open(self.config, "r")
loaded_ui = json.loads(f.read())
f.close()
self.loaded_git_version = loaded_ui['commit']
self.loaded_branch = loaded_ui['branch']
self.loaded_requirements = loaded_ui['requirements']
except:
self.loaded_git_version = None
self.loaded_branch = None
self.loaded_requirements = None
def setup(self, branch=DEFAULT_UI_BRANCH, user_specified=None, branch_specified=False):
self.branch = branch
if user_specified:
if os.path.isdir(user_specified):
log.info("Checking user specified UI directory: " + str(user_specified))
self.branch = "user-specified"
self.loaded_git_version = "user-specified"
d = self.migrate_ui(source=user_specified)
d.addCallback(lambda _: self._load_ui())
return d
else:
log.info("User specified UI directory doesn't exist, using " + branch)
elif self.loaded_branch == "user-specified" and not branch_specified:
log.info("Loading user provided UI")
d = self._load_ui()
return d
else:
log.info("Checking for updates for UI branch: " + branch)
self._git_url = "https://api.github.com/repos/lbryio/lbry-web-ui/git/refs/heads/%s" % branch
self._dist_url = "https://raw.githubusercontent.com/lbryio/lbry-web-ui/%s/dist.zip" % branch
d = self._up_to_date()
d.addCallback(lambda r: self._download_ui() if not r else self._load_ui())
return d
def _up_to_date(self):
def _get_git_info():
response = urlopen(self._git_url)
data = json.loads(response.read())
return defer.succeed(data['object']['sha'])
def _set_git(version):
self.git_version = version.replace('\n', '')
if self.git_version == self.loaded_git_version:
log.info("UI is up to date")
return defer.succeed(True)
else:
log.info("UI updates available, checking if installation meets requirements")
return defer.succeed(False)
d = _get_git_info()
d.addCallback(_set_git)
return d
def migrate_ui(self, source=None):
if not source:
requires_file = self.update_requires
source_dir = self.update_dir
delete_source = True
else:
requires_file = os.path.join(source, "requirements.txt")
source_dir = source
delete_source = False
def _check_requirements():
if not os.path.isfile(requires_file):
log.info("No requirements.txt file, rejecting request to migrate this UI")
return defer.succeed(False)
f = open(requires_file, "r")
for requirement in [line for line in f.read().split('\n') if line]:
t = requirement.split('=')
if len(t) == 3:
self.requirements[t[0]] = {'version': t[2], 'operator': '=='}  # 'name==version' splits into ['name', '', 'version']
elif t[0][-1] == ">":
self.requirements[t[0][:-1]] = {'version': t[1], 'operator': '>='}
elif t[0][-1] == "<":
self.requirements[t[0][:-1]] = {'version': t[1], 'operator': '<='}
f.close()
passed_requirements = True
for r in self.requirements:
if r == 'lbrynet':
c = lbrynet_version
elif r == 'lbryum':
c = lbryum_version
else:
c = None
if c:
if self.requirements[r]['operator'] == '==':
if not self.requirements[r]['version'] == c:
passed_requirements = False
log.info("Local version %s of %s does not meet UI requirement for version %s" % (
c, r, self.requirements[r]['version']))
else:
log.info("Local version of %s meets ui requirement" % r)
if self.requirements[r]['operator'] == '>=':
if not self.requirements[r]['version'] <= c:
passed_requirements = False
log.info("Local version %s of %s does not meet UI requirement for version %s" % (
c, r, self.requirements[r]['version']))
else:
log.info("Local version of %s meets ui requirement" % r)
if self.requirements[r]['operator'] == '<=':
if not self.requirements[r]['version'] >= c:
passed_requirements = False
log.info("Local version %s of %s does not meet UI requirement for version %s" % (
c, r, self.requirements[r]['version']))
else:
log.info("Local version of %s meets ui requirement" % r)
return defer.succeed(passed_requirements)
def _disp_failure():
log.info("Failed to satisfy requirements for branch '%s', update was not loaded" % self.branch)
return defer.succeed(False)
def _do_migrate():
if os.path.isdir(self.active_dir):
shutil.rmtree(self.active_dir)
shutil.copytree(source_dir, self.active_dir)
if delete_source:
shutil.rmtree(source_dir)
log.info("Loaded UI update")
f = open(self.config, "w")
loaded_ui = {'commit': self.git_version, 'branch': self.branch, 'requirements': self.requirements}
f.write(json.dumps(loaded_ui))
f.close()
self.loaded_git_version = loaded_ui['commit']
self.loaded_branch = loaded_ui['branch']
self.loaded_requirements = loaded_ui['requirements']
return defer.succeed(True)
d = _check_requirements()
d.addCallback(lambda r: _do_migrate() if r else _disp_failure())
return d
def _download_ui(self):
def _delete_update_dir():
if os.path.isdir(self.update_dir):
shutil.rmtree(self.update_dir)
return defer.succeed(None)
def _dl_ui():
url = urlopen(self._dist_url)
z = ZipFile(StringIO(url.read()))
names = [i for i in z.namelist() if '.DS_exStore' not in i and '__MACOSX' not in i]
z.extractall(self.update_dir, members=names)
log.info("Downloaded files for UI commit " + str(self.git_version).replace("\n", ""))
return self.branch
d = _delete_update_dir()
d.addCallback(lambda _: _dl_ui())
d.addCallback(lambda _: self.migrate_ui())
d.addCallback(lambda _: self._load_ui())
return d
def _load_ui(self):
for d in [i[0] for i in os.walk(self.active_dir) if os.path.dirname(i[0]) == self.active_dir]:
self.root.putChild(os.path.basename(d), static.File(d))
return defer.succeed(True)
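A note on the requirements check above: migrate_ui() parses requirements.txt with a plain split on '=', so each line is expected to look like name==version, name>=version or name<=version. A standalone sketch of that parsing rule, using made-up package names and versions purely for illustration:

# Hypothetical requirements.txt lines; only the format matters here.
sample = ["lbrynet>=0.3.10", "lbryum<=2.5.4", "lbry-web-ui==0.1.0"]

requirements = {}
for requirement in sample:
    t = requirement.split('=')
    if len(t) == 3:  # 'name==version' splits into ['name', '', 'version']
        requirements[t[0]] = {'version': t[2], 'operator': '=='}
    elif t[0][-1] == ">":  # 'name>=version' splits into ['name>', 'version']
        requirements[t[0][:-1]] = {'version': t[1], 'operator': '>='}
    elif t[0][-1] == "<":
        requirements[t[0][:-1]] = {'version': t[1], 'operator': '<='}
print requirements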
@ -0,0 +1,68 @@
import json
import logging.handlers
import sys
import os
from appdirs import user_data_dir
from twisted.internet.task import LoopingCall
from twisted.internet import reactor
if sys.platform != "darwin":
log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
else:
log_dir = user_data_dir("LBRY")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
LOG_FILENAME = os.path.join(log_dir, 'lbrynet-daemon.log')
if os.path.isfile(LOG_FILENAME):
f = open(LOG_FILENAME, 'r')
PREVIOUS_LOG = len(f.read())
f.close()
else:
PREVIOUS_LOG = 0
log = logging.getLogger(__name__)
handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, maxBytes=2097152, backupCount=5)
log.addHandler(handler)
log.setLevel(logging.INFO)
class Autofetcher(object):
"""
Download name claims as they occur
"""
def __init__(self, api):
self._api = api
self._checker = LoopingCall(self._check_for_new_claims)
self.best_block = None
def start(self):
reactor.addSystemEventTrigger('before', 'shutdown', self.stop)
self._checker.start(5)
def stop(self):
log.info("Stopping autofetcher")
self._checker.stop()
def _check_for_new_claims(self):
block = self._api.get_best_blockhash()
if block != self.best_block:
log.info("Checking new block for name claims, block hash: %s" % block)
self.best_block = block
transactions = self._api.get_block({'blockhash': block})['tx']
for t in transactions:
c = self._api.get_claims_for_tx({'txid': t})
if len(c):
for i in c:
log.info("Downloading stream for claim txid: %s" % t)
self._api.get({'name': t, 'stream_info': json.loads(i['value'])})
def run(api):
fetcher = Autofetcher(api)
fetcher.start()
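The Autofetcher above only relies on four methods of the api object it is given: get_best_blockhash, get_block, get_claims_for_tx and get. A minimal sketch of that contract with a stubbed API (the stub and its return values are invented for illustration; in the daemon it is presumably the LBRYDaemon API object that gets passed to run):

# Stub satisfying the calls Autofetcher makes; all values here are fabricated.
class StubAPI(object):
    def get_best_blockhash(self):
        return "deadbeef"
    def get_block(self, args):
        return {'tx': []}  # no transactions, so nothing gets downloaded
    def get_claims_for_tx(self, args):
        return []
    def get(self, args):
        pass

run(StubAPI())  # starts the LoopingCall that polls every 5 seconds
reactor.callLater(12, reactor.stop)  # let it poll a couple of times, then exit
reactor.run()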
@ -0,0 +1,33 @@
from twisted.internet import defer
class migrator(object):
"""
Re-resolve lbry names to write missing data to blockchain.db and to cache the nametrie
"""
def __init__(self, api):
self._api = api
def start(self):
def _resolve_claims(claimtrie):
claims = [i for i in claimtrie if 'txid' in i.keys()]
r = defer.DeferredList([self._api._resolve_name(claim['name'], force_refresh=True) for claim in claims], consumeErrors=True)
return r
def _restart_lbry_files():
def _restart_lbry_file(lbry_file):
return lbry_file.restore()
r = defer.DeferredList([_restart_lbry_file(lbry_file) for lbry_file in self._api.lbry_file_manager.lbry_files if not lbry_file.txid], consumeErrors=True)
r.callback(None)
return r
d = self._api.session.wallet.get_nametrie()
d.addCallback(_resolve_claims)
d.addCallback(lambda _: _restart_lbry_files())
def run(api):
refresher = migrator(api)
refresher.start()
@ -0,0 +1,160 @@
import json
import logging
import logging.handlers
import os
import sys
from appdirs import user_data_dir
from datetime import datetime
from twisted.internet import defer
from twisted.internet.task import LoopingCall
from lbrynet.core.Error import InvalidStreamInfoError, InsufficientFundsError
from lbrynet.core.PaymentRateManager import PaymentRateManager
from lbrynet.core.StreamDescriptor import download_sd_blob
from lbrynet.lbryfilemanager.LBRYFileDownloader import ManagedLBRYFileDownloaderFactory
from lbrynet.conf import DEFAULT_TIMEOUT
INITIALIZING_CODE = 'initializing'
DOWNLOAD_METADATA_CODE = 'downloading_metadata'
DOWNLOAD_TIMEOUT_CODE = 'timeout'
DOWNLOAD_RUNNING_CODE = 'running'
DOWNLOAD_STOPPED_CODE = 'stopped'
STREAM_STAGES = [
(INITIALIZING_CODE, 'Initializing...'),
(DOWNLOAD_METADATA_CODE, 'Downloading metadata'),
(DOWNLOAD_RUNNING_CODE, 'Started stream'),
(DOWNLOAD_STOPPED_CODE, 'Paused stream'),
(DOWNLOAD_TIMEOUT_CODE, 'Stream timed out')
]
if sys.platform != "darwin":
log_dir = os.path.join(os.path.expanduser("~"), ".lbrynet")
else:
log_dir = user_data_dir("LBRY")
if not os.path.isdir(log_dir):
os.mkdir(log_dir)
LOG_FILENAME = os.path.join(log_dir, 'lbrynet-daemon.log')
log = logging.getLogger(__name__)
handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, maxBytes=2097152, backupCount=5)
log.addHandler(handler)
log.setLevel(logging.INFO)
class GetStream(object):
def __init__(self, sd_identifier, session, wallet, lbry_file_manager, max_key_fee, pay_key=True, data_rate=0.5,
timeout=DEFAULT_TIMEOUT, download_directory=None, file_name=None):
self.wallet = wallet
self.resolved_name = None
self.description = None
self.key_fee = None
self.key_fee_address = None
self.data_rate = data_rate
self.pay_key = pay_key
self.name = None
self.file_name = file_name
self.session = session
self.payment_rate_manager = PaymentRateManager(self.session.base_payment_rate_manager)
self.lbry_file_manager = lbry_file_manager
self.sd_identifier = sd_identifier
self.stream_hash = None
self.max_key_fee = max_key_fee
self.stream_info = None
self.stream_info_manager = None
self.d = defer.Deferred(None)
self.timeout = timeout
self.timeout_counter = 0
self.download_directory = download_directory
self.download_path = None
self.downloader = None
self.finished = defer.Deferred()
self.checker = LoopingCall(self.check_status)
self.code = STREAM_STAGES[0]
def check_status(self):
self.timeout_counter += 1
if self.download_path:
self.checker.stop()
self.finished.callback((self.stream_hash, self.download_path))
elif self.timeout_counter >= self.timeout:
log.info("Timeout downloading lbry://" + self.resolved_name + ", " + str(self.stream_info))
self.checker.stop()
self.d.cancel()
self.code = STREAM_STAGES[4]
self.finished.callback(False)
def start(self, stream_info, name):
self.resolved_name = name
self.stream_info = stream_info
if 'stream_hash' in self.stream_info.keys():
self.stream_hash = self.stream_info['stream_hash']
elif 'sources' in self.stream_info.keys():
self.stream_hash = self.stream_info['sources']['lbry_sd_hash']
else:
raise InvalidStreamInfoError(self.stream_info)
if 'description' in self.stream_info.keys():
self.description = self.stream_info['description']
if 'key_fee' in self.stream_info.keys():
self.key_fee = float(self.stream_info['key_fee'])
if 'key_fee_address' in self.stream_info.keys():
self.key_fee_address = self.stream_info['key_fee_address']
else:
self.key_fee_address = None
else:
self.key_fee = None
self.key_fee_address = None
if self.key_fee > self.max_key_fee:
if self.pay_key:
log.info("Key fee (" + str(self.key_fee) + ") above limit of " + str(
self.max_key_fee) + ", didn't download lbry://" + str(self.resolved_name))
return defer.fail(None)
else:
pass
def _cause_timeout():
self.timeout_counter = self.timeout * 2
def _set_status(x, status):
self.code = next(s for s in STREAM_STAGES if s[0] == status)
return x
self.checker.start(1)
self.d.addCallback(lambda _: _set_status(None, DOWNLOAD_METADATA_CODE))
self.d.addCallback(lambda _: download_sd_blob(self.session, self.stream_hash, self.payment_rate_manager))
self.d.addCallback(self.sd_identifier.get_metadata_for_sd_blob)
self.d.addCallback(lambda r: _set_status(r, DOWNLOAD_RUNNING_CODE))
self.d.addCallback(lambda metadata: (
next(factory for factory in metadata.factories if isinstance(factory, ManagedLBRYFileDownloaderFactory)),
metadata))
self.d.addCallback(lambda (factory, metadata): factory.make_downloader(metadata,
[self.data_rate, True],
self.payment_rate_manager,
download_directory=self.download_directory,
file_name=self.file_name))
self.d.addCallbacks(self._start_download, lambda _: _cause_timeout())
self.d.callback(None)
return self.finished
def _start_download(self, downloader):
def _pay_key_fee():
if self.key_fee is not None and self.key_fee_address is not None:
reserved_points = self.wallet.reserve_points(self.key_fee_address, self.key_fee)
if reserved_points is None:
return defer.fail(InsufficientFundsError())
log.info("Key fee: " + str(self.key_fee) + " | " + str(self.key_fee_address))
return self.wallet.send_points_to_address(reserved_points, self.key_fee)
return defer.succeed(None)
if self.pay_key:
d = _pay_key_fee()
else:
d = defer.Deferred()
self.downloader = downloader
self.download_path = os.path.join(downloader.download_directory, downloader.file_name)
d.addCallback(lambda _: log.info("[" + str(datetime.now()) + "] Downloading " + str(self.stream_hash) + " --> " + str(self.download_path)))
d.addCallback(lambda _: self.downloader.start())
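GetStream.start() above takes the resolved claim metadata as a plain dict and finds the sd blob hash either under a top-level 'stream_hash' key or under the newer 'sources' layout. A short sketch of the two shapes it accepts (the hash, fee and address values below are placeholders, not real claim data):

# Two metadata layouts understood by GetStream.start(); values are placeholders.
old_style = {
    'stream_hash': 'deadbeef...',
    'description': 'example stream',
    'key_fee': 0.0,
    'key_fee_address': 'bXXXXXXXX',
}
new_style = {'sources': {'lbry_sd_hash': 'deadbeef...'}}

for info in (old_style, new_style):
    if 'stream_hash' in info.keys():
        sd_hash = info['stream_hash']
    elif 'sources' in info.keys():
        sd_hash = info['sources']['lbry_sd_hash']
    print sd_hash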
@ -176,10 +176,10 @@ class DownloaderApp(object):
style="Stop.TButton", cursor=button_cursor) style="Stop.TButton", cursor=button_cursor)
self.wallet_menu_button.grid(row=0, column=1, padx=(5, 0)) self.wallet_menu_button.grid(row=0, column=1, padx=(5, 0))
def popup(event): def popup_wallet(event):
self.wallet_menu.tk_popup(event.x_root, event.y_root) self.wallet_menu.tk_popup(event.x_root, event.y_root)
self.wallet_menu_button.bind("<Button-1>", popup) self.wallet_menu_button.bind("<Button-1>", popup_wallet)
self.uri_frame = ttk.Frame(self.frame, style="B.TFrame") self.uri_frame = ttk.Frame(self.frame, style="B.TFrame")
self.uri_frame.grid() self.uri_frame.grid()
@ -204,7 +204,7 @@ class DownloaderApp(object):
def paste_command():
self.uri_entry.event_generate('<Control-v>')

def popup_uri(event):
selection_menu = tk.Menu(
self.master, tearoff=0
)
@ -214,7 +214,7 @@ class DownloaderApp(object):
selection_menu.add_command(label=" Paste ", command=paste_command) selection_menu.add_command(label=" Paste ", command=paste_command)
selection_menu.tk_popup(event.x_root, event.y_root) selection_menu.tk_popup(event.x_root, event.y_root)
self.uri_entry.bind("<Button-3>", popup) self.uri_entry.bind("<Button-3>", popup_uri)
self.uri_button = ttk.Button( self.uri_button = ttk.Button(
self.uri_frame, text="Go", command=self._open_stream, self.uri_frame, text="Go", command=self._open_stream,
@ -118,7 +118,7 @@ class StreamFrame(object):
def cancel(self):
if self.cancel_func is not None:
self.cancel_func()  # pylint: disable=not-callable
self.stream_frame.destroy()
self.app.stream_removed()
@ -0,0 +1,30 @@
U2FsdGVkX1/oLoj7zhPd0imh2T8RhLpKzZLk4EHQV0GUJ1g8nwGvxWov+CUMnmjh
Y+LNdomGBYWoUilhe4JaWDUYepwIXn6+TvuWBdVEMGpJhXGbmIf+ncMXo6AP8Fh/
g9x79SE4RJxFj3utc02B2ivVehoQno5sEvNSZMVml5n9skJoJUBbAsbp1p+7Hm5j
p2Z7UI7/qiih6TmszX5KQvOl/DPezVNksn1c1mUShuaBTjxbprGlr/LvtboR3mAd
8DN4yTGLLJAQ2+FNftM4rAedrr6db2AhQ8WxRmiwTfdubnEoC6zYySFq2kmdjj3S
gPnK0atx+ZihMp+S+GqMnvfAHEtb0vqxoq6nFcSIvcQVxKPyzu5E1kMriY4Oq3xr
K6ebc1NKJHjh7niaUmR3NImBx2h1LAAf/hcKRH2+ATEVczGtI1AsSGgGhUM34eGH
7G+m7+bIkgb8AtlaIGS/VVHsIZCNSgzwZJoNd3hD6ZV65Hb2yeT6Hhos88/05iFT
ewYasa73TqFm5SJHRwt4d1W9WVIJKJPDJ910p+V+NZVUsKOx34+vMNrjCrqW9p9x
gQnza2V/F6blIHTbSzIGc+MFbeHYBO80d+v5jVxheL8z6ollDVts1SyJ5rKJBY6c
quvSgmc/ltE0dqRxLOQJ9mAFbayuMIUP6CbRkPXp8GfE55UtUJkDilalzcpCPrUC
YJpuAI61INOQZZPEVKWW8L68/tLY+oEwWpexQX7xs4FUCblIFf20T3XE2lVuBHf9
Bp9k7cD2m4mNrbzWOJuqrVt1pr176l9+VSP/ESdDFbmPch2FHl8HK8kgfJvkV+iB
kudmAmzI9DTUpWd5lJp6Fr/rLCMjslFDs37zMg4/E5ikKFSDNeYMtgPZhCwM83kh
OAktow4QAzh3RdbVZMFxaKk9nbiGPuBEsgvraPjb2gY8U34RC9R2FINIuTnJttLK
q7CKFTdbJIf+TIIgzfNu/c978adsK/qS68iltyyx8WFflcybnlqVgja192Ptqw1M
PXBQkH4mUrAeWDfmCPPh/mhO67Bau5u9Wzv/qZ2RXcX0dgXOoMa2sO6ZpR2SzxCJ
/XZwXnElMl+pvojLURDOV16fMPpjMCbzCN+hQabiTASqFNCsz4C9hmOquNh2t+V9
8xvU/bnOM+/SMhahjYnvdhmRMcY+5Wv32ZnKATq88dq4T7/OZI7q3IsROZ7MnucT
x4vADvcFOfOdtPK35IFfMTfl+Ri3q7REIHMts2WEwXddf8CUiVeIaf8NgrWYW0hP
f9DQbMGKFcqqCHlKrQkv2dBKX/qEbIzN7T7535Ly68zyFuBS252gsLO7nrf+CLEZ
AROOfmt2jv0BvQ4MI5dslzsXFAU11tS36gOZ303R+NJVVqySkza964h2rH5M1F7i
A5p7w/l0OVV7r6aXkmsrIcsUZuY7QnZJORQ1MxNtK20weKfrqs90nMTklUVPc4V8
LnAW6AYem0ZaeDHn2kx947sglMYxf0h/mFECGhif9hfDTErw7TkSJ26t9ByuEyEf
vGpp3P4iTXHUx7HSh7L4KDva6CP6slGjFMAFUEETn7N5uX3VEYeztMBdHLz0XHZc
PcgVZ8kytXVTEg95upvWmliEbQqWRsy6sr9PanaN1QY6re6RLlYj4pOWVm8qgCXU
IJVTWkROMlYZTWCibCsTsY8fk8aNObZamHjzZGvnU8nEGTx7xQJS8i0r3NM1j2Ka
ehBA+WfXbTplI/Fq8Z2Nrb/O39hQpGbXp4ERsEmVbK0twwsqVNehI0CdobxmGsd5
E9Afb9iL97DTXsna1Il6FXnHvj3iAQsxxaNLIaj0sN1GaQd9N1mbxThlFNOM3wry
jI8TKCWEfLRQzykkcR3sMg==
@ -0,0 +1,33 @@
U2FsdGVkX1+DAD1J9fegD2PjAVffLjKB5urEZYVfRRsZ9uCYeGggOyopseTFPACo
IGBkauMQ1lrQWSltYzDzbzPdhe02w6xWHx8hh9QRepSSWlTUHjIxr8A1GryZo7a8
4dLs4qxjQDcDdp+csOrBqm3AKS4oeVFRXWxvmr2AueUQ/CEyvhAR1wS3XZ1L0Pod
6XJWAhDIPtT9zfSQbCiVvHtjK7VxVjIMv9VwDfE2Gny/otaNf9Wuor6luiDMF3Z8
H6X5yh/mkmNZvI/bcOrCmGUkDEVvw/pessdZwwTIdNSzkBE8GqC9Oc5jdOMpW7J1
afyZDslB1SaNXm/9HDPnl67guZRUM1j6QJxBwIyj8vUhygcG4J6HOAQrWi61ebSX
5ZZrNddMycVRDhE1GphhRPJm7S/v8aeASc8dlAy3ircERJXIO/WhWpdysKgVB8/u
wtc6TVK2MUD0CvmG7hatqCQcwsgK7Goy7zFN4kkNviAegxpm5OAmEtHH5SrlHXWI
CmMAZyNAkwmcAPwviXXaSSA9z/A++KDqQHbRJJKz/fFZ98OsVs64zPwMT3uMAp2E
FiBkCqpxj6q0EFHJwhpNtEPdPF62T9CzcV2CoNr1qyDS7UqlKBi2vkGHNALtBqbm
69rN3ESpjhRzK4pbRFBM0R73JWVW8LM/jWIOFOPh1qd5yKNALKGqw4sEtZ96YJju
Y4tP17+kRknzgSVn6zuUSg/wznIVs+eQ9eYQVd+T70XDUGe2PfQTRm3bz/8W7m8u
tDqE/yhgBJDXuc0zlmXxXxH4cXEhKPA2ScrEw974nWKWrNgtmN+skaJVQELFqVm8
47amfobRAsp/l0+d86shUg9QC3XzrI/jkPPpKsQUKoYF1OULpXwjMJs7o0e/Ajo6
S32DWVMqHfhd/M1LBUSFqLb802Y+qFVOXRSJOV2VEqfplbsnEPnmkBrUjVT4y6x6
HxxqPq5IQM6qLK9TCPXbYCzp3knWim8A5jDFXYNHHeTkuA1xbpkM4lCas64pYV9V
fkokG4fdFM09oileakOxt0iz0DJjXlb/XZLOvuhMeAWPcJC9UTrmMUdXCBgem3Nk
vT40dxCxMK3EREM8dvbNndC7sg9mVJ6dRY7+inDnhhdGhy9FM592lBvFDTS9oJm0
ZX+0FeDvIGnG1kEIYSrBhCP/9X++6EzF+YzO1zo2YXtVlP2JT/9cD5g6SajvI1+5
pdv2zzdFRfEKDpJ8bRDr6iMJLCmllWSWkeSE2VNo30+atCorc5/6vfjD/BOJtZDj
vUxPsZxulxiNp24YwDBJ+B+uid8x6xC7h1hId9QF51wUA54AzHRtypAuAOVHjdyj
W+EkCpic1eDyFMVhfy7hB/Ef9lpvuQsKfmvTu3ege8TOMQBeaKmlKBAIyGeTcTH/
vRz/UAYXEzTRNWkfCFZQ6oucVWSSUxX53DnvD4NcT0AX7+kRY+bhZcZW/nc/NEqN
Tzs3Zv9N9h3M618FK/mqSvhqxukMIRXRhyiISEQyAJtm0SuMu9SXG9Q+G766KOWm
+votjNrHQKIojPI3BcbFHCfXET5qPoUQVPw3M5Av0E3Tm36ZAdl+bhl852H9Vf2M
TprNFmr4U/sljyetEpywG1aEzxijISCflFNBZrqMIwcdYdduLCKPcMNtqSpFiXLV
WtDPBvoz4XldIkZIA+70oBqCwJchILI5ujlo1haF7/ILIK5aynITu2zoaDE6gtE8
VFl30aGF1uRKYYle8E+RLxv5ID/xFuPlNsBQ3ZsfNbsE9GEoVFmTTGneN+wuTl7G
NNRdyjv7Py3zgC1sqA6cmzRJkgX+CGKm3aCJTvflDKYVGRpmphsYWLqZp7i12Noj
/eHzfYkMU2uOh50IUls8l2fYRlkwPuMQxVtn2g7/3dUXna8zQ0LSqAPRf8zZAszx
nGG1kwpYyJ4YknC8oKhnt3LZWfmAEJFRNSYHDTbBncynqADoUB6EH5j5qcdI/pFG
lsrrw+lbCPbN7dDbbbg685ESKI4WZ7j0zkJIrDWdSFYCitmo437h+t9AcWBF5SEd
vOtCHu46xXuBJbDmz2mslw==
@ -0,0 +1,46 @@
#!/bin/bash
#
# This script is used by travis to install lbry from source
#
set -euo pipefail
set -o xtrace
SUDO=''
if (( $EUID != 0 )); then
SUDO='sudo'
fi
if [ -z ${TRAVIS+x} ]; then
# if not on travis, it's nice to see progress
QUIET=""
else
QUIET="-qq"
fi
# get the required OS packages
$SUDO apt-get ${QUIET} update
$SUDO apt-get ${QUIET} install -y --no-install-recommends \
build-essential python-dev libffi-dev libssl-dev git \
libgmp3-dev wget ca-certificates python-virtualenv
# create a virtualenv so we don't muck with anything on the system
virtualenv venv
# need to unset these or else we can't activate
set +eu
source venv/bin/activate
set -eu
# need a modern version of pip (more modern than ubuntu default)
wget https://bootstrap.pypa.io/get-pip.py
python get-pip.py
rm get-pip.py
pip install -r requirements.txt
pip install nose coverage coveralls pylint
nosetests --with-coverage --cover-package=lbrynet -v -I functional_tests.py tests/
# TODO: submit coverage report to coveralls
# TODO: as code quality improves, make pylint be more strict
pylint -E --disable=inherit-non-class --disable=no-member lbrynet
@ -1,25 +0,0 @@
from setuptools import setup
import os
APP = [os.path.join('lbrynet', 'lbrynet_daemon', 'Apps', 'LBRYURIHandler.py')]
DATA_FILES = []
OPTIONS = {'argv_emulation': True,
'packages': ['jsonrpc'],
'plist': {
'LSUIElement': True,
'CFBundleIdentifier': 'io.lbry.LBRYURIHandler',
'CFBundleURLTypes': [
{
'CFBundleURLTypes': 'LBRYURIHandler',
'CFBundleURLSchemes': ['lbry']
}
]
}
}
setup(
app=APP,
data_files=DATA_FILES,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
)
@ -1,60 +0,0 @@
import xmlrpclib
import json
from datetime import datetime
from time import sleep
from slackclient import SlackClient
def get_conf():
f = open('testbot.conf', 'r')
token = f.readline().replace('\n', '')
f.close()
return token
def test_lbrynet(lbry, slack, channel):
logfile = open('lbrynet_test_log.txt', 'a')
try:
path = lbry.get('testlbrynet')['path']
except:
msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
slack.rtm_connect()
slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
file_name = path.split('/')[len(path.split('/'))-1]
for n in range(10):
files = [f for f in lbry.get_lbry_files() if (json.loads(f)['file_name'] == file_name) and json.loads(f)['completed']]
if files:
break
sleep(30)
if files:
msg = '[' + str(datetime.now()) + '] LBRYnet download test successful'
slack.rtm_connect()
# slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
else:
msg = '[' + str(datetime.now()) + '] ! Failed to obtain LBRYnet test file'
slack.rtm_connect()
slack.rtm_send_message(channel, msg)
print msg
logfile.write(msg + '\n')
lbry.delete_lbry_file('test.jpg')
logfile.close()
token = get_conf()
sc = SlackClient(token)
sc.rtm_connect()
channel = [c['id'] for c in json.loads(sc.api_call('channels.list'))['channels'] if c['name'] == 'tech'][0]
print 'Connected to slack'
daemon = xmlrpclib.ServerProxy("http://localhost:7080")
while True:
test_lbrynet(daemon, sc, channel)
sleep(600)