Merge pull request #12 from lbryio/master

Update
Dave-A 2016-09-01 10:30:23 -04:00 committed by GitHub
commit c7c32fd4dc
18 changed files with 782 additions and 591 deletions

.appveyor.yml (new file, 144 lines)

@@ -0,0 +1,144 @@
version: 1.0.{build}
environment:
  key_pass:
    secure: UQOSEsEsXFmZvp1k18+7Lbd19NNPEEniq2euQgU0giz3OYHu0qWvQP9qecUayCtagAeG4C75RfwChkS2r+DjWZaePnr2OtPK46sM2pjXTx4=
  pfx_key:
    secure: qBgsWfnk50nFMs/Ft9T6lRx11eudnE17Z+paEtSJJ7tdn9F29Huy4rrw1bDjVC9LrmYch+09hZJm9WHGYuc4YuWPFBlvtngGqYsYD4r2gnQ=
notifications:
  - provider: Slack
    incoming_webhook:
      secure: SyfpZAVQA8PEPdjqqc6Yav3i5wNOCFxNcoHx3oJPYSLs13PdDLpSPKPiYJgxTYogspCIFcskc8nMUkXC0s7P8AWCzTKtusjKchd8D+pJR2I=
branches:
  only:
    - master
    - appveyor
    - windows-setup
clone_folder: c:\projects\lbry
init:
  - ps: >-
      $env:Path += ";C:\MinGW\bin\"
      $env:Path += ";C:\Program Files (x86)\Windows Kits\10\bin\x86\"
      gcc --version
      mingw32-make --version
      mkdir C:\temp
      Invoke-WebRequest "https://pypi.python.org/packages/55/90/e987e28ed29b571f315afea7d317b6bf4a551e37386b344190cffec60e72/miniupnpc-1.9.tar.gz" -OutFile "C:\temp\miniupnpc-1.9.tar.gz"
      cd C:\temp
      7z e miniupnpc-1.9.tar.gz
      7z x miniupnpc-1.9.tar
      cd C:\temp\miniupnpc-1.9
      mingw32-make.exe -f Makefile.mingw
      C:\Python27\python.exe C:\temp\miniupnpc-1.9\setupmingw32.py build --compiler=mingw32
      C:\Python27\python.exe C:\temp\miniupnpc-1.9\setupmingw32.py install
      Invoke-WebRequest "https://github.com/lbryio/lbry/raw/master/packaging/windows/libs/gmpy-1.17-cp27-none-win32.whl" -OutFile "C:\temp\gmpy-1.17-cp27-none-win32.whl"
      C:\Python27\Scripts\pip.exe install "C:\temp\gmpy-1.17-cp27-none-win32.whl"
      C:\Python27\Scripts\pip.exe install pypiwin32==219
      C:\Python27\Scripts\pip.exe install six==1.9.0
      C:\Python27\Scripts\pip.exe install requests==2.9.1
      C:\Python27\Scripts\pip.exe install zope.interface==4.1.3
      C:\Python27\Scripts\pip.exe install cx-freeze==4.3.3
      C:\Python27\Scripts\pip.exe install cython==0.24.1
      C:\Python27\Scripts\pip.exe install Twisted==16.0.0
      C:\Python27\Scripts\pip.exe install Yapsy==1.11.223
      C:\Python27\Scripts\pip.exe install appdirs==1.4.0
      C:\Python27\Scripts\pip.exe install argparse==1.2.1
      C:\Python27\Scripts\pip.exe install colorama==0.3.7
      C:\Python27\Scripts\pip.exe install dnspython==1.12.0
      C:\Python27\Scripts\pip.exe install ecdsa==0.13
      C:\Python27\Scripts\pip.exe install jsonrpc==1.2
      C:\Python27\Scripts\pip.exe install jsonrpclib==0.1.7
      C:\Python27\Scripts\pip.exe install loggly-python-handler==1.0.0
      C:\Python27\Scripts\pip.exe install pbkdf2==1.3
      C:\Python27\Scripts\pip.exe install protobuf==3.0.0
      C:\Python27\Scripts\pip.exe install pycrypto==2.6.1
      C:\Python27\Scripts\pip.exe install python-bitcoinrpc==0.1
      C:\Python27\Scripts\pip.exe install qrcode==5.2.2
      C:\Python27\Scripts\pip.exe install requests_futures==0.9.7
      C:\Python27\Scripts\pip.exe install seccure==0.3.1.3
      C:\Python27\Scripts\pip.exe install simplejson==3.8.2
      C:\Python27\Scripts\pip.exe install slowaes==0.1a1
      C:\Python27\Scripts\pip.exe install txJSON-RPC==0.3.1
      C:\Python27\Scripts\pip.exe install unqlite==0.5.3
      C:\Python27\Scripts\pip.exe install wsgiref==0.1.2
      C:\Python27\Scripts\pip.exe install base58==0.2.2
      C:\Python27\Scripts\pip.exe install googlefinance==0.7
      C:\Python27\Scripts\pip.exe install git+https://github.com/lbryio/lbryum.git
      cd C:\projects\lbry
install:
  - nuget install secure-file -ExcludeVersion
  - secure-file\tools\secure-file -decrypt packaging\windows\certs\lbry2.pfx.enc -secret %pfx_key%
  - cmd: C:\Python27\python.exe setup.py install
build_script:
  - ps: C:\Python27\python.exe setup.py build bdist_msi
  - signtool.exe sign /f packaging\windows\certs\lbry2.pfx /p %key_pass% /tr http://tsa.starfieldtech.com /td SHA256 /fd SHA256 dist\*.msi
test_script:
  - ps: >-
      C:\Python27\Scripts\pip.exe install mock
      C:\Python27\Scripts\pip.exe install pylint
      C:\Python27\python.exe C:\Python27\Scripts\trial.py C:\projects\lbry\tests
artifacts:
  - path: dist/*.msi
    name: msi
  - path: build/exe.win32-2.7/
    name: lbry-portable
  - path: packaging/windows/lbry-win32-app/LBRY-URI.reg
    name: LBRY-URI


@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.4.1
+current_version = 0.4.3
 commit = True
 tag = True


@@ -1,2 +1 @@
 include lbrynet/lbrynet_console/plugins/blindrepeater.yapsy-plugin
-recursive-include lbrynet/lbrynet_gui *.gif *.ico *.xbm *.conf


@@ -1,63 +0,0 @@
version: 1.0.{build}
init:
- ps: $env:Path += ";C:\MinGW\bin\"
- ps: gcc --version
- ps: mingw32-make --version
- ps: mkdir C:\temp
- ps: Invoke-WebRequest "https://pypi.python.org/packages/55/90/e987e28ed29b571f315afea7d317b6bf4a551e37386b344190cffec60e72/miniupnpc-1.9.tar.gz" -OutFile "C:\temp\miniupnpc-1.9.tar.gz"
- ps: cd C:\temp
- ps: 7z e miniupnpc-1.9.tar.gz
- ps: 7z x miniupnpc-1.9.tar
- ps: cd C:\temp\miniupnpc-1.9
- ps: |
mingw32-make.exe -f Makefile.mingw
C:\Python27\python.exe C:\temp\miniupnpc-1.9\setupmingw32.py build --compiler=mingw32
C:\Python27\python.exe C:\temp\miniupnpc-1.9\setupmingw32.py install
- ps: Invoke-WebRequest "https://github.com/lbryio/lbry/raw/master/packaging/windows/libs/gmpy-1.17-cp27-none-win32.whl" -OutFile "C:\temp\gmpy-1.17-cp27-none-win32.whl"
- ps: C:\Python27\Scripts\pip.exe install "C:\temp\gmpy-1.17-cp27-none-win32.whl"
- ps: C:\Python27\Scripts\pip.exe install pypiwin32==219
- ps: C:\Python27\Scripts\pip.exe install six==1.9.0
- ps: C:\Python27\Scripts\pip.exe install requests==2.9.1
- ps: C:\Python27\Scripts\pip.exe install zope.interface==4.1.3
- ps: C:\Python27\Scripts\pip.exe install cx-freeze==4.3.3
- ps: C:\Python27\Scripts\pip.exe install cython==0.24.1
- ps: C:\Python27\Scripts\pip.exe install Twisted==16.0.0
- ps: C:\Python27\Scripts\pip.exe install Yapsy==1.11.223
- ps: C:\Python27\Scripts\pip.exe install appdirs==1.4.0
- ps: C:\Python27\Scripts\pip.exe install argparse==1.2.1
- ps: C:\Python27\Scripts\pip.exe install colorama==0.3.7
- ps: C:\Python27\Scripts\pip.exe install dnspython==1.12.0
- ps: C:\Python27\Scripts\pip.exe install ecdsa==0.13
- ps: C:\Python27\Scripts\pip.exe install jsonrpc==1.2
- ps: C:\Python27\Scripts\pip.exe install jsonrpclib==0.1.7
- ps: C:\Python27\Scripts\pip.exe install loggly-python-handler==1.0.0
- ps: C:\Python27\Scripts\pip.exe install pbkdf2==1.3
- ps: C:\Python27\Scripts\pip.exe install protobuf==3.0.0
- ps: C:\Python27\Scripts\pip.exe install pycrypto==2.6.1
- ps: C:\Python27\Scripts\pip.exe install python-bitcoinrpc==0.1
- ps: C:\Python27\Scripts\pip.exe install qrcode==5.2.2
- ps: C:\Python27\Scripts\pip.exe install requests_futures==0.9.7
- ps: C:\Python27\Scripts\pip.exe install seccure==0.3.1.3
- ps: C:\Python27\Scripts\pip.exe install simplejson==3.8.2
- ps: C:\Python27\Scripts\pip.exe install slowaes==0.1a1
- ps: C:\Python27\Scripts\pip.exe install txJSON-RPC==0.3.1
- ps: C:\Python27\Scripts\pip.exe install unqlite==0.5.3
- ps: C:\Python27\Scripts\pip.exe install wsgiref==0.1.2
- ps: C:\Python27\Scripts\pip.exe install base58==0.2.2
- ps: C:\Python27\Scripts\pip.exe install googlefinance==0.7
- ps: C:\Python27\Scripts\pip.exe install git+https://github.com/lbryio/lbryum.git
- ps: cd C:\temp
- ps: git clone --depth 1 https://github.com/lbryio/lbry.git
- ps: cd C:\temp\lbry
build_script:
- cmd: C:\Python27\python.exe setup_win32.py build bdist_msi
artifacts:
- path: dist/*.msi
name: msi
- path: build/exe.win32-2.7/
name: lbry-portable
- path: packaging/windows/lbry-win32-app/LBRY-URI.reg
name: LBRY-URI


@@ -1,2 +1,2 @@
-__version__ = "0.4.1"
+__version__ = "0.4.3"
 version = tuple(__version__.split('.'))


@@ -4,7 +4,7 @@ Some network wide and also application specific parameters
 import os
-IS_DEVELOPMENT_VERSION = True
+IS_DEVELOPMENT_VERSION = False
 MAX_HANDSHAKE_SIZE = 2**16
 MAX_REQUEST_SIZE = 2**16


@@ -279,11 +279,16 @@ class LBRYWallet(object):
     def _send_payments(self):
         payments_to_send = {}
         for address, points in self.queued_payments.items():
-            log.info("Should be sending %s points to %s", str(points), str(address))
-            payments_to_send[address] = points
-            self.total_reserved_points -= points
-            self.wallet_balance -= points
+            if points > 0:
+                log.info("Should be sending %s points to %s", str(points), str(address))
+                payments_to_send[address] = points
+                self.total_reserved_points -= points
+                self.wallet_balance -= points
+            else:
+                log.info("Skipping dust")
             del self.queued_payments[address]
         if payments_to_send:
             log.info("Creating a transaction with outputs %s", str(payments_to_send))
             d = self._do_send_many(payments_to_send)
@@ -1200,8 +1205,10 @@ class LBRYumWallet(LBRYWallet):
     def get_balance(self):
         cmd = known_commands['getbalance']
         func = getattr(self.cmd_runner, cmd.name)
-        d = threads.deferToThread(func)
-        d.addCallback(lambda result: result['unmatured'] if 'unmatured' in result else result['confirmed'])
+        accounts = None
+        exclude_claimtrietx = True
+        d = threads.deferToThread(func, accounts, exclude_claimtrietx)
+        d.addCallback(lambda result: result['confirmed'])
         d.addCallback(Decimal)
         return d
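For readability, the updated get_balance chain from the hunk above as a standalone sketch. The positional (accounts, exclude_claimtrietx) call shape and the 'confirmed' key are taken from the diff; the free-standing function name is made up for illustration.

from decimal import Decimal
from twisted.internet import threads

def get_confirmed_balance(cmd_runner, known_commands):
    # Sketch of the new LBRYumWallet.get_balance() flow shown in the diff.
    cmd = known_commands['getbalance']
    func = getattr(cmd_runner, cmd.name)
    accounts = None             # query all accounts
    exclude_claimtrietx = True  # leave claimtrie transactions out of the total
    d = threads.deferToThread(func, accounts, exclude_claimtrietx)
    d.addCallback(lambda result: result['confirmed'])
    d.addCallback(Decimal)
    return d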


@@ -1,17 +1,17 @@
 import logging
-import os
+# import os
 def migrate_db(db_dir, start, end):
     current = start
     old_dirs = []
-    if os.name == "nt":
-        return old_dirs
-    while current < end:
-        if current == 0:
-            from lbrynet.db_migrator.migrate0to1 import do_migration
-            old_dirs.append(do_migration(db_dir))
-        current += 1
+    # if os.name == "nt":
+    #     return old_dirs
+    # while current < end:
+    #     if current == 0:
+    #         from lbrynet.db_migrator.migrate0to1 import do_migration
+    #         old_dirs.append(do_migration(db_dir))
+    #     current += 1
     return old_dirs
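With its body commented out, migrate_db is effectively a no-op. A minimal sketch of the resulting behavior (the function signature comes from the hunk above):

def migrate_db(db_dir, start, end):
    # After this commit the Windows check and the 0 -> 1 migration loop are
    # commented out, so no migration runs and no old directories are reported.
    current = start  # kept from the original body, now unused
    old_dirs = []
    return old_dirs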


@@ -1,307 +1,307 @@
import sqlite3 # import sqlite3
import unqlite # import unqlite
import leveldb # import leveldb
import shutil # import shutil
import os # import os
import logging # import logging
import json # import json
#
#
log = logging.getLogger(__name__) # log = logging.getLogger(__name__)
#
#
known_dbs = ['lbryfile_desc.db', 'lbryfiles.db', 'valuable_blobs.db', 'blobs.db', # known_dbs = ['lbryfile_desc.db', 'lbryfiles.db', 'valuable_blobs.db', 'blobs.db',
'lbryfile_blob.db', 'lbryfile_info.db', 'settings.db', 'blind_settings.db', # 'lbryfile_blob.db', 'lbryfile_info.db', 'settings.db', 'blind_settings.db',
'blind_peers.db', 'blind_info.db', 'lbryfile_info.db', 'lbryfile_manager.db', # 'blind_peers.db', 'blind_info.db', 'lbryfile_info.db', 'lbryfile_manager.db',
'live_stream.db', 'stream_info.db', 'stream_blob.db', 'stream_desc.db'] # 'live_stream.db', 'stream_info.db', 'stream_blob.db', 'stream_desc.db']
#
#
def do_move(from_dir, to_dir): # def do_move(from_dir, to_dir):
for known_db in known_dbs: # for known_db in known_dbs:
known_db_path = os.path.join(from_dir, known_db) # known_db_path = os.path.join(from_dir, known_db)
if os.path.exists(known_db_path): # if os.path.exists(known_db_path):
log.debug("Moving %s to %s", # log.debug("Moving %s to %s",
os.path.abspath(known_db_path), # os.path.abspath(known_db_path),
os.path.abspath(os.path.join(to_dir, known_db))) # os.path.abspath(os.path.join(to_dir, known_db)))
shutil.move(known_db_path, os.path.join(to_dir, known_db)) # shutil.move(known_db_path, os.path.join(to_dir, known_db))
else: # else:
log.debug("Did not find %s", os.path.abspath(known_db_path)) # log.debug("Did not find %s", os.path.abspath(known_db_path))
#
#
def do_migration(db_dir): # def do_migration(db_dir):
old_dir = os.path.join(db_dir, "_0_to_1_old") # old_dir = os.path.join(db_dir, "_0_to_1_old")
new_dir = os.path.join(db_dir, "_0_to_1_new") # new_dir = os.path.join(db_dir, "_0_to_1_new")
try: # try:
log.info("Moving dbs from the real directory to %s", os.path.abspath(old_dir)) # log.info("Moving dbs from the real directory to %s", os.path.abspath(old_dir))
os.makedirs(old_dir) # os.makedirs(old_dir)
do_move(db_dir, old_dir) # do_move(db_dir, old_dir)
except: # except:
log.error("An error occurred moving the old db files.") # log.error("An error occurred moving the old db files.")
raise # raise
try: # try:
log.info("Creating the new directory in %s", os.path.abspath(new_dir)) # log.info("Creating the new directory in %s", os.path.abspath(new_dir))
os.makedirs(new_dir) # os.makedirs(new_dir)
#
except: # except:
log.error("An error occurred creating the new directory.") # log.error("An error occurred creating the new directory.")
raise # raise
try: # try:
log.info("Doing the migration") # log.info("Doing the migration")
migrate_blob_db(old_dir, new_dir) # migrate_blob_db(old_dir, new_dir)
migrate_lbryfile_db(old_dir, new_dir) # migrate_lbryfile_db(old_dir, new_dir)
migrate_livestream_db(old_dir, new_dir) # migrate_livestream_db(old_dir, new_dir)
migrate_ptc_db(old_dir, new_dir) # migrate_ptc_db(old_dir, new_dir)
migrate_lbryfile_manager_db(old_dir, new_dir) # migrate_lbryfile_manager_db(old_dir, new_dir)
migrate_settings_db(old_dir, new_dir) # migrate_settings_db(old_dir, new_dir)
migrate_repeater_db(old_dir, new_dir) # migrate_repeater_db(old_dir, new_dir)
log.info("Migration succeeded") # log.info("Migration succeeded")
except: # except:
log.error("An error occurred during the migration. Restoring.") # log.error("An error occurred during the migration. Restoring.")
do_move(old_dir, db_dir) # do_move(old_dir, db_dir)
raise # raise
try: # try:
log.info("Moving dbs in the new directory to the real directory") # log.info("Moving dbs in the new directory to the real directory")
do_move(new_dir, db_dir) # do_move(new_dir, db_dir)
db_revision = open(os.path.join(db_dir, 'db_revision'), mode='w+') # db_revision = open(os.path.join(db_dir, 'db_revision'), mode='w+')
db_revision.write("1") # db_revision.write("1")
db_revision.close() # db_revision.close()
os.rmdir(new_dir) # os.rmdir(new_dir)
except: # except:
log.error("An error occurred moving the new db files.") # log.error("An error occurred moving the new db files.")
raise # raise
return old_dir # return old_dir
#
#
def migrate_blob_db(old_db_dir, new_db_dir): # def migrate_blob_db(old_db_dir, new_db_dir):
old_blob_db_path = os.path.join(old_db_dir, "blobs.db") # old_blob_db_path = os.path.join(old_db_dir, "blobs.db")
if not os.path.exists(old_blob_db_path): # if not os.path.exists(old_blob_db_path):
return True # return True
#
old_db = leveldb.LevelDB(old_blob_db_path) # old_db = leveldb.LevelDB(old_blob_db_path)
new_db_conn = sqlite3.connect(os.path.join(new_db_dir, "blobs.db")) # new_db_conn = sqlite3.connect(os.path.join(new_db_dir, "blobs.db"))
c = new_db_conn.cursor() # c = new_db_conn.cursor()
c.execute("create table if not exists blobs (" + # c.execute("create table if not exists blobs (" +
" blob_hash text primary key, " + # " blob_hash text primary key, " +
" blob_length integer, " + # " blob_length integer, " +
" last_verified_time real, " + # " last_verified_time real, " +
" next_announce_time real" # " next_announce_time real"
")") # ")")
new_db_conn.commit() # new_db_conn.commit()
c = new_db_conn.cursor() # c = new_db_conn.cursor()
for blob_hash, blob_info in old_db.RangeIter(): # for blob_hash, blob_info in old_db.RangeIter():
blob_length, verified_time, announce_time = json.loads(blob_info) # blob_length, verified_time, announce_time = json.loads(blob_info)
c.execute("insert into blobs values (?, ?, ?, ?)", # c.execute("insert into blobs values (?, ?, ?, ?)",
(blob_hash, blob_length, verified_time, announce_time)) # (blob_hash, blob_length, verified_time, announce_time))
new_db_conn.commit() # new_db_conn.commit()
new_db_conn.close() # new_db_conn.close()
#
#
def migrate_lbryfile_db(old_db_dir, new_db_dir): # def migrate_lbryfile_db(old_db_dir, new_db_dir):
old_lbryfile_db_path = os.path.join(old_db_dir, "lbryfiles.db") # old_lbryfile_db_path = os.path.join(old_db_dir, "lbryfiles.db")
if not os.path.exists(old_lbryfile_db_path): # if not os.path.exists(old_lbryfile_db_path):
return True # return True
#
stream_info_db = leveldb.LevelDB(os.path.join(old_db_dir, "lbryfile_info.db")) # stream_info_db = leveldb.LevelDB(os.path.join(old_db_dir, "lbryfile_info.db"))
stream_blob_db = leveldb.LevelDB(os.path.join(old_db_dir, "lbryfile_blob.db")) # stream_blob_db = leveldb.LevelDB(os.path.join(old_db_dir, "lbryfile_blob.db"))
stream_desc_db = leveldb.LevelDB(os.path.join(old_db_dir, "lbryfile_desc.db")) # stream_desc_db = leveldb.LevelDB(os.path.join(old_db_dir, "lbryfile_desc.db"))
#
db_conn = sqlite3.connect(os.path.join(new_db_dir, "lbryfile_info.db")) # db_conn = sqlite3.connect(os.path.join(new_db_dir, "lbryfile_info.db"))
c = db_conn.cursor() # c = db_conn.cursor()
c.execute("create table if not exists lbry_files (" + # c.execute("create table if not exists lbry_files (" +
" stream_hash text primary key, " + # " stream_hash text primary key, " +
" key text, " + # " key text, " +
" stream_name text, " + # " stream_name text, " +
" suggested_file_name text" + # " suggested_file_name text" +
")") # ")")
c.execute("create table if not exists lbry_file_blobs (" + # c.execute("create table if not exists lbry_file_blobs (" +
" blob_hash text, " + # " blob_hash text, " +
" stream_hash text, " + # " stream_hash text, " +
" position integer, " + # " position integer, " +
" iv text, " + # " iv text, " +
" length integer, " + # " length integer, " +
" foreign key(stream_hash) references lbry_files(stream_hash)" + # " foreign key(stream_hash) references lbry_files(stream_hash)" +
")") # ")")
c.execute("create table if not exists lbry_file_descriptors (" + # c.execute("create table if not exists lbry_file_descriptors (" +
" sd_blob_hash TEXT PRIMARY KEY, " + # " sd_blob_hash TEXT PRIMARY KEY, " +
" stream_hash TEXT, " + # " stream_hash TEXT, " +
" foreign key(stream_hash) references lbry_files(stream_hash)" + # " foreign key(stream_hash) references lbry_files(stream_hash)" +
")") # ")")
db_conn.commit() # db_conn.commit()
c = db_conn.cursor() # c = db_conn.cursor()
for stream_hash, stream_info in stream_info_db.RangeIter(): # for stream_hash, stream_info in stream_info_db.RangeIter():
key, name, suggested_file_name = json.loads(stream_info) # key, name, suggested_file_name = json.loads(stream_info)
c.execute("insert into lbry_files values (?, ?, ?, ?)", # c.execute("insert into lbry_files values (?, ?, ?, ?)",
(stream_hash, key, name, suggested_file_name)) # (stream_hash, key, name, suggested_file_name))
db_conn.commit() # db_conn.commit()
c = db_conn.cursor() # c = db_conn.cursor()
for blob_hash_stream_hash, blob_info in stream_blob_db.RangeIter(): # for blob_hash_stream_hash, blob_info in stream_blob_db.RangeIter():
b_h, s_h = json.loads(blob_hash_stream_hash) # b_h, s_h = json.loads(blob_hash_stream_hash)
position, iv, length = json.loads(blob_info) # position, iv, length = json.loads(blob_info)
c.execute("insert into lbry_file_blobs values (?, ?, ?, ?, ?)", # c.execute("insert into lbry_file_blobs values (?, ?, ?, ?, ?)",
(b_h, s_h, position, iv, length)) # (b_h, s_h, position, iv, length))
db_conn.commit() # db_conn.commit()
c = db_conn.cursor() # c = db_conn.cursor()
for sd_blob_hash, stream_hash in stream_desc_db.RangeIter(): # for sd_blob_hash, stream_hash in stream_desc_db.RangeIter():
c.execute("insert into lbry_file_descriptors values (?, ?)", # c.execute("insert into lbry_file_descriptors values (?, ?)",
(sd_blob_hash, stream_hash)) # (sd_blob_hash, stream_hash))
db_conn.commit() # db_conn.commit()
db_conn.close() # db_conn.close()
#
#
def migrate_livestream_db(old_db_dir, new_db_dir): # def migrate_livestream_db(old_db_dir, new_db_dir):
old_db_path = os.path.join(old_db_dir, "stream_info.db") # old_db_path = os.path.join(old_db_dir, "stream_info.db")
if not os.path.exists(old_db_path): # if not os.path.exists(old_db_path):
return True # return True
stream_info_db = leveldb.LevelDB(os.path.join(old_db_dir, "stream_info.db")) # stream_info_db = leveldb.LevelDB(os.path.join(old_db_dir, "stream_info.db"))
stream_blob_db = leveldb.LevelDB(os.path.join(old_db_dir, "stream_blob.db")) # stream_blob_db = leveldb.LevelDB(os.path.join(old_db_dir, "stream_blob.db"))
stream_desc_db = leveldb.LevelDB(os.path.join(old_db_dir, "stream_desc.db")) # stream_desc_db = leveldb.LevelDB(os.path.join(old_db_dir, "stream_desc.db"))
#
db_conn = sqlite3.connect(os.path.join(new_db_dir, "live_stream.db")) # db_conn = sqlite3.connect(os.path.join(new_db_dir, "live_stream.db"))
#
c = db_conn.cursor() # c = db_conn.cursor()
#
c.execute("create table if not exists live_streams (" + # c.execute("create table if not exists live_streams (" +
" stream_hash text primary key, " + # " stream_hash text primary key, " +
" public_key text, " + # " public_key text, " +
" key text, " + # " key text, " +
" stream_name text, " + # " stream_name text, " +
" next_announce_time real" + # " next_announce_time real" +
")") # ")")
c.execute("create table if not exists live_stream_blobs (" + # c.execute("create table if not exists live_stream_blobs (" +
" blob_hash text, " + # " blob_hash text, " +
" stream_hash text, " + # " stream_hash text, " +
" position integer, " + # " position integer, " +
" revision integer, " + # " revision integer, " +
" iv text, " + # " iv text, " +
" length integer, " + # " length integer, " +
" signature text, " + # " signature text, " +
" foreign key(stream_hash) references live_streams(stream_hash)" + # " foreign key(stream_hash) references live_streams(stream_hash)" +
")") # ")")
c.execute("create table if not exists live_stream_descriptors (" + # c.execute("create table if not exists live_stream_descriptors (" +
" sd_blob_hash TEXT PRIMARY KEY, " + # " sd_blob_hash TEXT PRIMARY KEY, " +
" stream_hash TEXT, " + # " stream_hash TEXT, " +
" foreign key(stream_hash) references live_streams(stream_hash)" + # " foreign key(stream_hash) references live_streams(stream_hash)" +
")") # ")")
#
db_conn.commit() # db_conn.commit()
#
c = db_conn.cursor() # c = db_conn.cursor()
for stream_hash, stream_info in stream_info_db.RangeIter(): # for stream_hash, stream_info in stream_info_db.RangeIter():
public_key, key, name, next_announce_time = json.loads(stream_info) # public_key, key, name, next_announce_time = json.loads(stream_info)
c.execute("insert into live_streams values (?, ?, ?, ?, ?)", # c.execute("insert into live_streams values (?, ?, ?, ?, ?)",
(stream_hash, public_key, key, name, next_announce_time)) # (stream_hash, public_key, key, name, next_announce_time))
db_conn.commit() # db_conn.commit()
c = db_conn.cursor() # c = db_conn.cursor()
for blob_hash_stream_hash, blob_info in stream_blob_db.RangeIter(): # for blob_hash_stream_hash, blob_info in stream_blob_db.RangeIter():
b_h, s_h = json.loads(blob_hash_stream_hash) # b_h, s_h = json.loads(blob_hash_stream_hash)
position, revision, iv, length, signature = json.loads(blob_info) # position, revision, iv, length, signature = json.loads(blob_info)
c.execute("insert into live_stream_blobs values (?, ?, ?, ?, ?, ?, ?)", # c.execute("insert into live_stream_blobs values (?, ?, ?, ?, ?, ?, ?)",
(b_h, s_h, position, revision, iv, length, signature)) # (b_h, s_h, position, revision, iv, length, signature))
db_conn.commit() # db_conn.commit()
c = db_conn.cursor() # c = db_conn.cursor()
for sd_blob_hash, stream_hash in stream_desc_db.RangeIter(): # for sd_blob_hash, stream_hash in stream_desc_db.RangeIter():
c.execute("insert into live_stream_descriptors values (?, ?)", # c.execute("insert into live_stream_descriptors values (?, ?)",
(sd_blob_hash, stream_hash)) # (sd_blob_hash, stream_hash))
db_conn.commit() # db_conn.commit()
db_conn.close() # db_conn.close()
#
#
def migrate_ptc_db(old_db_dir, new_db_dir): # def migrate_ptc_db(old_db_dir, new_db_dir):
old_db_path = os.path.join(old_db_dir, "ptcwallet.db") # old_db_path = os.path.join(old_db_dir, "ptcwallet.db")
if not os.path.exists(old_db_path): # if not os.path.exists(old_db_path):
return True # return True
old_db = leveldb.LevelDB(old_db_path) # old_db = leveldb.LevelDB(old_db_path)
try: # try:
p_key = old_db.Get("private_key") # p_key = old_db.Get("private_key")
new_db = unqlite.UnQLite(os.path.join(new_db_dir, "ptcwallet.db")) # new_db = unqlite.UnQLite(os.path.join(new_db_dir, "ptcwallet.db"))
new_db['private_key'] = p_key # new_db['private_key'] = p_key
except KeyError: # except KeyError:
pass # pass
#
#
def migrate_lbryfile_manager_db(old_db_dir, new_db_dir): # def migrate_lbryfile_manager_db(old_db_dir, new_db_dir):
old_db_path = os.path.join(old_db_dir, "lbryfiles.db") # old_db_path = os.path.join(old_db_dir, "lbryfiles.db")
if not os.path.exists(old_db_path): # if not os.path.exists(old_db_path):
return True # return True
old_db = leveldb.LevelDB(old_db_path) # old_db = leveldb.LevelDB(old_db_path)
new_db = sqlite3.connect(os.path.join(new_db_dir, "lbryfile_info.db")) # new_db = sqlite3.connect(os.path.join(new_db_dir, "lbryfile_info.db"))
c = new_db.cursor() # c = new_db.cursor()
c.execute("create table if not exists lbry_file_options (" + # c.execute("create table if not exists lbry_file_options (" +
" blob_data_rate real, " + # " blob_data_rate real, " +
" status text," + # " status text," +
" stream_hash text," # " stream_hash text,"
" foreign key(stream_hash) references lbry_files(stream_hash)" + # " foreign key(stream_hash) references lbry_files(stream_hash)" +
")") # ")")
new_db.commit() # new_db.commit()
LBRYFILE_STATUS = "t" # LBRYFILE_STATUS = "t"
LBRYFILE_OPTIONS = "o" # LBRYFILE_OPTIONS = "o"
c = new_db.cursor() # c = new_db.cursor()
for k, v in old_db.RangeIter(): # for k, v in old_db.RangeIter():
key_type, stream_hash = json.loads(k) # key_type, stream_hash = json.loads(k)
if key_type == LBRYFILE_STATUS: # if key_type == LBRYFILE_STATUS:
try: # try:
rate = json.loads(old_db.Get(json.dumps((LBRYFILE_OPTIONS, stream_hash))))[0] # rate = json.loads(old_db.Get(json.dumps((LBRYFILE_OPTIONS, stream_hash))))[0]
except KeyError: # except KeyError:
rate = None # rate = None
c.execute("insert into lbry_file_options values (?, ?, ?)", # c.execute("insert into lbry_file_options values (?, ?, ?)",
(rate, v, stream_hash)) # (rate, v, stream_hash))
new_db.commit() # new_db.commit()
new_db.close() # new_db.close()
#
#
def migrate_settings_db(old_db_dir, new_db_dir): # def migrate_settings_db(old_db_dir, new_db_dir):
old_settings_db_path = os.path.join(old_db_dir, "settings.db") # old_settings_db_path = os.path.join(old_db_dir, "settings.db")
if not os.path.exists(old_settings_db_path): # if not os.path.exists(old_settings_db_path):
return True # return True
old_db = leveldb.LevelDB(old_settings_db_path) # old_db = leveldb.LevelDB(old_settings_db_path)
new_db = unqlite.UnQLite(os.path.join(new_db_dir, "settings.db")) # new_db = unqlite.UnQLite(os.path.join(new_db_dir, "settings.db"))
for k, v in old_db.RangeIter(): # for k, v in old_db.RangeIter():
new_db[k] = v # new_db[k] = v
#
#
def migrate_repeater_db(old_db_dir, new_db_dir): # def migrate_repeater_db(old_db_dir, new_db_dir):
old_repeater_db_path = os.path.join(old_db_dir, "valuable_blobs.db") # old_repeater_db_path = os.path.join(old_db_dir, "valuable_blobs.db")
if not os.path.exists(old_repeater_db_path): # if not os.path.exists(old_repeater_db_path):
return True # return True
old_db = leveldb.LevelDB(old_repeater_db_path) # old_db = leveldb.LevelDB(old_repeater_db_path)
info_db = sqlite3.connect(os.path.join(new_db_dir, "blind_info.db")) # info_db = sqlite3.connect(os.path.join(new_db_dir, "blind_info.db"))
peer_db = sqlite3.connect(os.path.join(new_db_dir, "blind_peers.db")) # peer_db = sqlite3.connect(os.path.join(new_db_dir, "blind_peers.db"))
unql_db = unqlite.UnQLite(os.path.join(new_db_dir, "blind_settings.db")) # unql_db = unqlite.UnQLite(os.path.join(new_db_dir, "blind_settings.db"))
BLOB_INFO_TYPE = 'b' # BLOB_INFO_TYPE = 'b'
SETTING_TYPE = 's' # SETTING_TYPE = 's'
PEER_TYPE = 'p' # PEER_TYPE = 'p'
info_c = info_db.cursor() # info_c = info_db.cursor()
info_c.execute("create table if not exists valuable_blobs (" + # info_c.execute("create table if not exists valuable_blobs (" +
" blob_hash text primary key, " + # " blob_hash text primary key, " +
" blob_length integer, " + # " blob_length integer, " +
" reference text, " + # " reference text, " +
" peer_host text, " + # " peer_host text, " +
" peer_port integer, " + # " peer_port integer, " +
" peer_score text" + # " peer_score text" +
")") # ")")
info_db.commit() # info_db.commit()
peer_c = peer_db.cursor() # peer_c = peer_db.cursor()
peer_c.execute("create table if not exists approved_peers (" + # peer_c.execute("create table if not exists approved_peers (" +
" ip_address text, " + # " ip_address text, " +
" port integer" + # " port integer" +
")") # ")")
peer_db.commit() # peer_db.commit()
info_c = info_db.cursor() # info_c = info_db.cursor()
peer_c = peer_db.cursor() # peer_c = peer_db.cursor()
for k, v in old_db.RangeIter(): # for k, v in old_db.RangeIter():
key_type, key_rest = json.loads(k) # key_type, key_rest = json.loads(k)
if key_type == PEER_TYPE: # if key_type == PEER_TYPE:
host, port = key_rest # host, port = key_rest
peer_c.execute("insert into approved_peers values (?, ?)", # peer_c.execute("insert into approved_peers values (?, ?)",
(host, port)) # (host, port))
elif key_type == SETTING_TYPE: # elif key_type == SETTING_TYPE:
unql_db[key_rest] = v # unql_db[key_rest] = v
elif key_type == BLOB_INFO_TYPE: # elif key_type == BLOB_INFO_TYPE:
blob_hash = key_rest # blob_hash = key_rest
length, reference, peer_host, peer_port, peer_score = json.loads(v) # length, reference, peer_host, peer_port, peer_score = json.loads(v)
info_c.execute("insert into valuable_blobs values (?, ?, ?, ?, ?, ?)", # info_c.execute("insert into valuable_blobs values (?, ?, ?, ?, ?, ?)",
(blob_hash, length, reference, peer_host, peer_port, peer_score)) # (blob_hash, length, reference, peer_host, peer_port, peer_score))
info_db.commit() # info_db.commit()
peer_db.commit() # peer_db.commit()
info_db.close() # info_db.close()
peer_db.close() # peer_db.close()


@@ -12,7 +12,7 @@ log = logging.getLogger(__name__)
 class DBLBRYFileMetadataManager(object):
-    """Store and provide access to LBRY file metadata using leveldb files"""
+    """Store and provide access to LBRY file metadata using sqlite"""
     def __init__(self, db_dir):
         self.db_dir = db_dir


@@ -167,7 +167,7 @@ class LBRYConsole():
         log.debug("Created the blobfile directory: %s", str(self.blobfile_dir))
     def _check_db_migration(self):
-        old_revision = 0
+        old_revision = 1
         db_revision_file = os.path.join(self.db_dir, "db_revision")
         if os.path.exists(db_revision_file):
             old_revision = int(open(db_revision_file).read().strip())


@@ -952,7 +952,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
         log.debug("Created the blobfile directory: %s", str(self.blobfile_dir))
     def _check_db_migration(self):
-        old_revision = 0
+        old_revision = 1
         db_revision_file = os.path.join(self.db_dir, "db_revision")
         if os.path.exists(db_revision_file):
             old_revision = int(open(db_revision_file).read().strip())
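Both the console and the daemon now default old_revision to 1 rather than 0, so a data directory without a db_revision file is treated as already migrated. A small sketch of that check, using a hypothetical helper name:

import os

def read_db_revision(db_dir):
    # A default of 1 (previously 0) means a missing db_revision file no longer
    # triggers the 0 -> 1 migration on fresh installs.
    old_revision = 1
    db_revision_file = os.path.join(db_dir, "db_revision")
    if os.path.exists(db_revision_file):
        old_revision = int(open(db_revision_file).read().strip())
    return old_revision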


@@ -1,5 +1,5 @@
 [Desktop Entry]
-Version=0.4.1
+Version=0.4.3
 Name=LBRY
 Comment=The world's first user-owned content marketplace
 Icon=lbry

Binary file not shown.


@@ -289,16 +289,17 @@ if __name__ == '__main__':
     lbry_daemon = JSONRPCProxy.from_url(API_CONNECTION_STRING)
     try:
-        started_daemon = lbry_daemon.is_running()
+        daemon_running = lbry_daemon.is_running()
+        start_daemon = False
     except:
-        started_daemon = False
+        start_daemon = True
     try:
         lbry_name = LBRYURIHandler.parse_name(sys.argv[1])
     except IndexError:
         lbry_name = None
-    if started_daemon:
-        LBRYURIHandler.open_address(lbry_name)
-    else:
+    if start_daemon:
         main(lbry_name)
+    else:
+        LBRYURIHandler.open_address(lbry_name)
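The handler now records explicitly whether the daemon must be started: a failing is_running() probe means it is not reachable. A hedged sketch of the reworked dispatch (handle_uri is a made-up wrapper; main and LBRYURIHandler are the names used in the diff and are assumed to be in scope):

def handle_uri(lbry_daemon, lbry_name):
    # If the running-daemon probe raises, start the daemon through main();
    # otherwise hand the parsed name to the daemon that is already up.
    try:
        lbry_daemon.is_running()
        start_daemon = False
    except Exception:
        start_daemon = True
    if start_daemon:
        main(lbry_name)
    else:
        LBRYURIHandler.open_address(lbry_name)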

setup.py (334 changed lines)

@@ -1,51 +1,301 @@
#!/usr/bin/env python #!/usr/bin/env python
from lbrynet import __version__
import ez_setup
ez_setup.use_setuptools()
import sys import sys
import os import os
from setuptools import setup, find_packages from lbrynet import __version__
base_dir = os.path.abspath(os.path.dirname(__file__)) LINUX = 1
DARWIN = 2
WINDOWS = 3
console_scripts = ['lbrynet-stdin-uploader = lbrynet.lbrynet_console.LBRYStdinUploader:launch_stdin_uploader',
'lbrynet-stdout-downloader = lbrynet.lbrynet_console.LBRYStdoutDownloader:launch_stdout_downloader',
'lbrynet-create-network = lbrynet.create_network:main',
'lbrynet-launch-node = lbrynet.dht.node:main',
'lbrynet-launch-rpc-node = lbrynet.rpc_node:main',
'lbrynet-rpc-node-cli = lbrynet.node_rpc_cli:main',
'lbrynet-lookup-hosts-for-hash = lbrynet.dht_scripts:get_hosts_for_hash_in_dht',
'lbrynet-announce_hash_to_dht = lbrynet.dht_scripts:announce_hash_to_dht',
'lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:start',
'stop-lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:stop',
'lbrynet-cli = lbrynet.lbrynet_daemon.LBRYDaemonCLI:main']
requires = ['pycrypto', 'twisted', 'miniupnpc', 'yapsy', 'seccure',
'python-bitcoinrpc==0.1', 'txJSON-RPC', 'requests>=2.4.2', 'unqlite==0.2.0',
'leveldb', 'lbryum', 'jsonrpc', 'simplejson', 'appdirs', 'six==1.9.0', 'base58', 'googlefinance',
'requests_futures']
if sys.platform.startswith("linux"): if sys.platform.startswith("linux"):
platform = LINUX
elif sys.platform.startswith("darwin"):
platform = DARWIN
elif sys.platform.startswith("win"):
platform = WINDOWS
else:
raise Exception("Unknown os: %s" % sys.platform)
base_dir = os.path.abspath(os.path.dirname(__file__))
package_name = "lbrynet"
dist_name = "LBRY"
description = "A decentralized media library and marketplace"
author = "LBRY, Inc"
url = "lbry.io"
maintainer = "Jack Robison"
maintainer_email = "jack@lbry.io"
keywords = "LBRY"
requires = [
'pycrypto',
'twisted',
'miniupnpc',
'yapsy',
'seccure',
'python-bitcoinrpc==0.1',
'txJSON-RPC',
'requests>=2.4.2',
'unqlite==0.2.0',
'lbryum',
'jsonrpc',
'simplejson',
'appdirs',
'six==1.9.0',
'base58',
'googlefinance',
'requests_futures'
]
console_scripts = [
# 'lbrynet-stdin-uploader = lbrynet.lbrynet_console.LBRYStdinUploader:launch_stdin_uploader',
# 'lbrynet-stdout-downloader = lbrynet.lbrynet_console.LBRYStdoutDownloader:launch_stdout_downloader',
# 'lbrynet-create-network = lbrynet.create_network:main',
# 'lbrynet-launch-node = lbrynet.dht.node:main',
# 'lbrynet-launch-rpc-node = lbrynet.rpc_node:main',
# 'lbrynet-rpc-node-cli = lbrynet.node_rpc_cli:main',
# 'lbrynet-lookup-hosts-for-hash = lbrynet.dht_scripts:get_hosts_for_hash_in_dht',
# 'lbrynet-announce_hash_to_dht = lbrynet.dht_scripts:announce_hash_to_dht',
'lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:start',
'stop-lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:stop',
'lbrynet-cli = lbrynet.lbrynet_daemon.LBRYDaemonCLI:main'
]
if platform == LINUX:
import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
requires.append('service-identity') requires.append('service-identity')
setup(name='lbrynet', setup(name=package_name,
description='A decentralized media library and marketplace', description=description,
version=__version__, version=__version__,
maintainer='Alex Grintsvayg', maintainer=maintainer,
maintainer_email='grin@lbry.io', maintainer_email=maintainer_email,
packages=find_packages(base_dir), url=url,
install_requires=requires, author=author,
entry_points={'console_scripts': console_scripts}, keywords=keywords,
data_files=[ packages=find_packages(base_dir),
('lbrynet/lbrynet_console/plugins', install_requires=requires,
[ entry_points={'console_scripts': console_scripts},
os.path.join(base_dir, 'lbrynet', 'lbrynet_console', 'plugins', data_files=[
'blindrepeater.yapsy-plugin') ('lbrynet/lbrynet_console/plugins',
] [
), os.path.join(base_dir, 'lbrynet', 'lbrynet_console', 'plugins',
], 'blindrepeater.yapsy-plugin')
dependency_links=['https://github.com/lbryio/lbryum/tarball/master/#egg=lbryum'], ]
) ),
],
dependency_links=['https://github.com/lbryio/lbryum/tarball/master/#egg=lbryum'],
)
elif platform == DARWIN:
import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(name=package_name,
description=description,
version=__version__,
maintainer=maintainer,
maintainer_email=maintainer_email,
url=url,
author=author,
keywords=keywords,
packages=find_packages(base_dir),
install_requires=requires,
entry_points={'console_scripts': console_scripts},
data_files=[
('lbrynet/lbrynet_console/plugins',
[
os.path.join(base_dir, 'lbrynet', 'lbrynet_console', 'plugins',
'blindrepeater.yapsy-plugin')
]
),
],
dependency_links=['https://github.com/lbryio/lbryum/tarball/master/#egg=lbryum'],
)
elif platform == WINDOWS:
import opcode
import pkg_resources
from cx_Freeze import setup, Executable
import requests.certs
win_icon = os.path.join('packaging', 'windows', 'lbry-win32-app', 'icons', 'lbry256.ico')
wordlist_path = pkg_resources.resource_filename('lbryum', 'wordlist')
# Allow virtualenv to find distutils of base python installation
distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
def find_data_file(filename):
if getattr(sys, 'frozen', False):
# The application is frozen
data_dir = os.path.dirname(sys.executable)
else:
# The application is not frozen
# Change this bit to match where you store your data files:
data_dir = os.path.dirname(__file__)
return os.path.join(data_dir, filename)
if os.path.isdir("C:\Program Files (x86)"):
shortcut_icon = 'C:\Program Files (x86)\lbrynet\icons\lbry256.ico'
else:
shortcut_icon = 'C:\Program Files\lbrynet\icons\lbry256.ico'
shortcut_table = [
('DesktopShortcut', # Shortcut
'DesktopFolder', # Directory
'lbrynet-daemon', # Name
'TARGETDIR', # Component
'[TARGETDIR]\lbrynet-daemon.exe', # Target
'--log-to-console', # Arguments
description, # Description
None, # Hotkey
shortcut_icon, # Icon
None, # IconIndex
None, # ShowCmd
'TARGETDIR', # WkDir
),
('DaemonShortcut', # Shortcut
'DesktopFolder', # Directory
'LBRY', # Name
'TARGETDIR', # Component
'[TARGETDIR]\{0}.exe'.format(dist_name), # Target
None, # Arguments
description, # Description
None, # Hotkey
shortcut_icon, # Icon
None, # IconIndex
None, # ShowCmd
'TARGETDIR', # WkDir
),
('DaemonCLIShortcut', # Shortcut
'DesktopFolder', # Directory
'lbrynet-cli', # Name
'TARGETDIR', # Component
'[TARGETDIR]\lbrynet-cli.exe', # Target
None, # Arguments
description, # Description
None, # Hotkey
shortcut_icon, # Icon
None, # IconIndex
None, # ShowCmd
'TARGETDIR', # WkDir
),
]
msi_data = {"Shortcut": shortcut_table}
bdist_msi_options = {
'upgrade_code': '{18c0e933-ad08-44e8-a413-1d0ed624c100}',
'add_to_path': True,
# Default install path is 'C:\Program Files\' for 32-bit or 'C:\Program Files (x86)\' for 64-bit
# 'initial_target_dir': r'[LocalAppDataFolder]\{0}'.format(name),
'data': msi_data
}
build_exe_options = {
'include_msvcr': True,
'includes': [],
'packages': ['cython',
'twisted',
'yapsy',
'appdirs',
'argparse',
'base58',
'colorama',
'cx_Freeze',
'dns',
'ecdsa',
'gmpy',
'googlefinance',
'jsonrpc',
'jsonrpclib',
'lbryum',
'loggly',
'miniupnpc',
'pbkdf2',
'google.protobuf',
'Crypto',
'bitcoinrpc',
'win32api',
'qrcode',
'requests',
'requests_futures',
'seccure',
'simplejson',
'six',
'aes',
'txjsonrpc',
'unqlite',
'wsgiref',
'zope.interface',
'os',
'pkg_resources'
],
'excludes': ['distutils', 'collections.sys', 'collections._weakref', 'collections.abc',
'Tkinter', 'tk', 'tcl', 'PyQt4', 'nose', 'mock'
'zope.interface._zope_interface_coptimizations', 'leveldb'],
'include_files': [(distutils_path, 'distutils'), (requests.certs.where(), 'cacert.pem'),
(os.path.join('packaging', 'windows', 'lbry-win32-app', 'icons', 'lbry16.ico'),
os.path.join('icons', 'lbry16.ico')),
(os.path.join('packaging', 'windows', 'lbry-win32-app', 'icons', 'lbry256.ico'),
os.path.join('icons', 'lbry256.ico')),
(os.path.join(wordlist_path, 'chinese_simplified.txt'),
os.path.join('wordlist', 'chinese_simplified.txt')),
(os.path.join(wordlist_path, 'english.txt'), os.path.join('wordlist', 'english.txt')),
(os.path.join(wordlist_path, 'japanese.txt'), os.path.join('wordlist', 'japanese.txt')),
(os.path.join(wordlist_path, 'portuguese.txt'), os.path.join('wordlist', 'portuguese.txt')),
(os.path.join(wordlist_path, 'spanish.txt'), os.path.join('wordlist', 'spanish.txt'))
],
'namespace_packages': ['zope', 'google']}
tray_app = Executable(
script=os.path.join('packaging', 'windows', 'lbry-win32-app', 'LBRYWin32App.py'),
base='Win32GUI',
icon=win_icon,
compress=True,
shortcutName=dist_name,
shortcutDir='DesktopFolder',
targetName='{0}.exe'.format(dist_name)
# targetDir="LocalAppDataFolder"
)
daemon_dir = os.path.join('lbrynet', 'lbrynet_daemon')
daemon_exe = Executable(
script=os.path.join(daemon_dir, 'LBRYDaemonControl.py'),
icon=win_icon,
shortcutName="lbrynet-daemon",
shortcutDir='DesktopFolder',
targetName='lbrynet-daemon.exe'
)
cli_exe = Executable(
script=os.path.join(daemon_dir, 'LBRYDaemonCLI.py'),
icon=win_icon,
shortcutName="lbrynet-cli",
shortcutDir='DesktopFolder',
targetName='lbrynet-cli.exe'
)
setup(
name=package_name,
description=description,
version=__version__,
maintainer=maintainer,
maintainer_email=maintainer_email,
url=url,
author=author,
keywords=keywords,
data_files=[],
options={'build_exe': build_exe_options,
'bdist_msi': bdist_msi_options},
executables=[
tray_app,
daemon_exe,
cli_exe
],
)
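The rewritten setup.py chooses its packaging path from a small platform constant instead of repeating sys.platform checks throughout the file. The detection logic from the top of the new file, lifted into a standalone sketch:

import sys

LINUX = 1
DARWIN = 2
WINDOWS = 3

def detect_platform():
    # Same dispatch the new setup.py performs before choosing
    # setuptools (Linux/macOS) or cx_Freeze (Windows).
    if sys.platform.startswith("linux"):
        return LINUX
    elif sys.platform.startswith("darwin"):
        return DARWIN
    elif sys.platform.startswith("win"):
        return WINDOWS
    raise Exception("Unknown os: %s" % sys.platform)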


@@ -1,151 +0,0 @@
# -*- coding: utf-8 -*-
"""
To create local builds and distributable .msi, run the following command:
python setup_win32.py build bdist_msi
"""
import opcode
import os
import pkg_resources
import sys
from cx_Freeze import setup, Executable
import requests.certs
from lbrynet import __version__
name = 'LBRY'
description = 'A decentralized media library and marketplace'
win_icon = os.path.join('packaging', 'windows', 'lbry-win32-app', 'icons', 'lbry256.ico')
wordlist_path = pkg_resources.resource_filename('lbryum', 'wordlist')
base_dir = os.path.abspath(os.path.dirname(__file__))
# Allow virtualenv to find distutils of base python installation
distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
def find_data_file(filename):
if getattr(sys, 'frozen', False):
# The application is frozen
data_dir = os.path.dirname(sys.executable)
else:
# The application is not frozen
# Change this bit to match where you store your data files:
data_dir = os.path.dirname(__file__)
return os.path.join(data_dir, filename)
console_scripts = ['lbrynet-stdin-uploader = lbrynet.lbrynet_console.LBRYStdinUploader:launch_stdin_uploader',
'lbrynet-stdout-downloader = lbrynet.lbrynet_console.LBRYStdoutDownloader:launch_stdout_downloader',
'lbrynet-create-network = lbrynet.create_network:main',
'lbrynet-launch-node = lbrynet.dht.node:main',
'lbrynet-launch-rpc-node = lbrynet.rpc_node:main',
'lbrynet-rpc-node-cli = lbrynet.node_rpc_cli:main',
'lbrynet-lookup-hosts-for-hash = lbrynet.dht_scripts:get_hosts_for_hash_in_dht',
'lbrynet-announce_hash_to_dht = lbrynet.dht_scripts:announce_hash_to_dht',
'lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:start',
'stop-lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:stop',
'lbrynet-cli = lbrynet.lbrynet_daemon.LBRYDaemonCLI:main']
# shortcut_table = [
# ('DesktopShortcut', # Shortcut
# 'DesktopFolder', # Directory
# name, # Name
# 'TARGETDIR', # Component
# '[TARGETDIR]\{0}.exe'.format(name), # Target
# None, # Arguments
# description, # Description
# None, # Hotkey
# win_icon, # Icon (doesn't work for some reason?)
# None, # IconIndex
# None, # ShowCmd
# 'TARGETDIR', # WkDir
# ),
# ]
#
# msi_data = {'Shortcut': shortcut_table}
bdist_msi_options = {
'upgrade_code': '{18c0e933-ad08-44e8-a413-1d0ed624c100}',
'add_to_path': False,
# Default install path is 'C:\Program Files\' for 32-bit or 'C:\Program Files (x86)\' for 64-bit
# 'initial_target_dir': r'[LocalAppDataFolder]\{0}'.format(name),
# 'data': msi_data
}
build_exe_options = {
'include_msvcr': True,
'includes': [],
'packages': ['cython',
'twisted',
'yapsy',
'appdirs',
'argparse',
'base58',
'colorama',
'cx_Freeze',
'dns',
'ecdsa',
'gmpy',
'googlefinance',
'jsonrpc',
'jsonrpclib',
'lbryum',
'loggly',
'miniupnpc',
'pbkdf2',
'google.protobuf',
'Crypto',
'bitcoinrpc',
'win32api',
'qrcode',
'requests',
'requests_futures',
'seccure',
'simplejson',
'six',
'aes',
'txjsonrpc',
'unqlite',
'wsgiref',
'zope.interface',
'os',
'pkg_resources'
],
'excludes': ['distutils', 'collections.sys', 'collections._weakref', 'collections.abc',
'Tkinter', 'tk', 'tcl', 'PyQt4', 'nose', 'mock'
'zope.interface._zope_interface_coptimizations'],
'include_files': [(distutils_path, 'distutils'), (requests.certs.where(), 'cacert.pem'),
(os.path.join('packaging', 'windows', 'lbry-win32-app', 'icons', 'lbry16.ico'),
os.path.join('icons', 'lbry16.ico')),
(os.path.join(wordlist_path, 'chinese_simplified.txt'),
os.path.join('wordlist', 'chinese_simplified.txt')),
(os.path.join(wordlist_path, 'english.txt'), os.path.join('wordlist', 'english.txt')),
(os.path.join(wordlist_path, 'japanese.txt'), os.path.join('wordlist', 'japanese.txt')),
(os.path.join(wordlist_path, 'portuguese.txt'), os.path.join('wordlist', 'portuguese.txt')),
(os.path.join(wordlist_path, 'spanish.txt'), os.path.join('wordlist', 'spanish.txt'))
],
'namespace_packages': ['zope', 'google']}
exe = Executable(
script=os.path.join('packaging', 'windows', 'lbry-win32-app', 'LBRYWin32App.py'),
base='Win32GUI',
icon=win_icon,
compress=True,
shortcutName=name,
shortcutDir='DesktopFolder',
targetName='{0}.exe'.format(name)
# targetDir="LocalAppDataFolder"
)
setup(
name=name,
version=__version__,
description=name + ": " + description,
url='lbry.io',
author='LBRY, Inc.',
keywords='LBRY',
data_files=[],
options={'build_exe': build_exe_options,
'bdist_msi': bdist_msi_options},
executables=[exe],
)


@@ -43,9 +43,13 @@ class TestReflector(unittest.TestCase):
         d.addCallback(lambda _: self.reflector_port.stopListening())
         def delete_test_env():
-            shutil.rmtree('client')
+            try:
+                shutil.rmtree('client')
+            except:
+                raise unittest.SkipTest("TODO: fix this for windows")
         d.addCallback(lambda _: threads.deferToThread(delete_test_env))
+        d.addErrback(lambda err: str(err))
         return d

     def test_reflector(self):
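The test cleanup now converts a failed directory removal (typically file locking on Windows) into a skipped test instead of an error. The same pattern in isolation, with a hypothetical default path argument:

import shutil
import unittest

def delete_test_env(path='client'):
    # Mirrors the diff's helper: if rmtree fails (e.g. files still locked on
    # Windows), skip the test rather than fail the whole run.
    try:
        shutil.rmtree(path)
    except Exception:
        raise unittest.SkipTest("TODO: fix this for windows")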