forked from LBRYCommunity/lbry-sdk
build windows in setup.py, drop leveldb requirement
This commit is contained in:
parent 1d39ec0317
commit 0203ef66ea
7 changed files with 611 additions and 359 deletions
@@ -9,6 +9,7 @@ branches:
   only:
     - master
     - appveyor
+    - windows-setup
 
 clone_folder: c:\projects\lbry
 
@@ -107,10 +108,10 @@ init:
   cd C:\projects\lbry
 
 install:
-  - cmd: C:\Python27\python.exe setup_win32.py install
+  - cmd: C:\Python27\python.exe setup.py install
 
 build_script:
-  - cmd: C:\Python27\python.exe setup_win32.py build bdist_msi
+  - cmd: C:\Python27\python.exe setup.py build bdist_msi
 
 test_script:
   - ps: >-
@@ -1,307 +1,307 @@
In the new revision every line of this 0-to-1 database migration module is commented out with a leading "#", taking the leveldb-dependent migration out of use. The module as it stood before the change:

import sqlite3
import unqlite
import leveldb
import shutil
import os
import logging
import json


log = logging.getLogger(__name__)


known_dbs = ['lbryfile_desc.db', 'lbryfiles.db', 'valuable_blobs.db', 'blobs.db',
             'lbryfile_blob.db', 'lbryfile_info.db', 'settings.db', 'blind_settings.db',
             'blind_peers.db', 'blind_info.db', 'lbryfile_info.db', 'lbryfile_manager.db',
             'live_stream.db', 'stream_info.db', 'stream_blob.db', 'stream_desc.db']


def do_move(from_dir, to_dir):
    for known_db in known_dbs:
        known_db_path = os.path.join(from_dir, known_db)
        if os.path.exists(known_db_path):
            log.debug("Moving %s to %s",
                      os.path.abspath(known_db_path),
                      os.path.abspath(os.path.join(to_dir, known_db)))
            shutil.move(known_db_path, os.path.join(to_dir, known_db))
        else:
            log.debug("Did not find %s", os.path.abspath(known_db_path))


def do_migration(db_dir):
    old_dir = os.path.join(db_dir, "_0_to_1_old")
    new_dir = os.path.join(db_dir, "_0_to_1_new")
    try:
        log.info("Moving dbs from the real directory to %s", os.path.abspath(old_dir))
        os.makedirs(old_dir)
        do_move(db_dir, old_dir)
    except:
        log.error("An error occurred moving the old db files.")
        raise
    try:
        log.info("Creating the new directory in %s", os.path.abspath(new_dir))
        os.makedirs(new_dir)

    except:
        log.error("An error occurred creating the new directory.")
        raise
    try:
        log.info("Doing the migration")
        migrate_blob_db(old_dir, new_dir)
        migrate_lbryfile_db(old_dir, new_dir)
        migrate_livestream_db(old_dir, new_dir)
        migrate_ptc_db(old_dir, new_dir)
        migrate_lbryfile_manager_db(old_dir, new_dir)
        migrate_settings_db(old_dir, new_dir)
        migrate_repeater_db(old_dir, new_dir)
        log.info("Migration succeeded")
    except:
        log.error("An error occurred during the migration. Restoring.")
        do_move(old_dir, db_dir)
        raise
    try:
        log.info("Moving dbs in the new directory to the real directory")
        do_move(new_dir, db_dir)
        db_revision = open(os.path.join(db_dir, 'db_revision'), mode='w+')
        db_revision.write("1")
        db_revision.close()
        os.rmdir(new_dir)
    except:
        log.error("An error occurred moving the new db files.")
        raise
    return old_dir


def migrate_blob_db(old_db_dir, new_db_dir):
    old_blob_db_path = os.path.join(old_db_dir, "blobs.db")
    if not os.path.exists(old_blob_db_path):
        return True

    old_db = leveldb.LevelDB(old_blob_db_path)
    new_db_conn = sqlite3.connect(os.path.join(new_db_dir, "blobs.db"))
    c = new_db_conn.cursor()
    c.execute("create table if not exists blobs (" +
              "    blob_hash text primary key, " +
              "    blob_length integer, " +
              "    last_verified_time real, " +
              "    next_announce_time real"
              ")")
    new_db_conn.commit()
    c = new_db_conn.cursor()
    for blob_hash, blob_info in old_db.RangeIter():
        blob_length, verified_time, announce_time = json.loads(blob_info)
        c.execute("insert into blobs values (?, ?, ?, ?)",
                  (blob_hash, blob_length, verified_time, announce_time))
    new_db_conn.commit()
    new_db_conn.close()


def migrate_lbryfile_db(old_db_dir, new_db_dir):
    old_lbryfile_db_path = os.path.join(old_db_dir, "lbryfiles.db")
    if not os.path.exists(old_lbryfile_db_path):
        return True

    stream_info_db = leveldb.LevelDB(os.path.join(old_db_dir, "lbryfile_info.db"))
    stream_blob_db = leveldb.LevelDB(os.path.join(old_db_dir, "lbryfile_blob.db"))
    stream_desc_db = leveldb.LevelDB(os.path.join(old_db_dir, "lbryfile_desc.db"))

    db_conn = sqlite3.connect(os.path.join(new_db_dir, "lbryfile_info.db"))
    c = db_conn.cursor()
    c.execute("create table if not exists lbry_files (" +
              "    stream_hash text primary key, " +
              "    key text, " +
              "    stream_name text, " +
              "    suggested_file_name text" +
              ")")
    c.execute("create table if not exists lbry_file_blobs (" +
              "    blob_hash text, " +
              "    stream_hash text, " +
              "    position integer, " +
              "    iv text, " +
              "    length integer, " +
              "    foreign key(stream_hash) references lbry_files(stream_hash)" +
              ")")
    c.execute("create table if not exists lbry_file_descriptors (" +
              "    sd_blob_hash TEXT PRIMARY KEY, " +
              "    stream_hash TEXT, " +
              "    foreign key(stream_hash) references lbry_files(stream_hash)" +
              ")")
    db_conn.commit()
    c = db_conn.cursor()
    for stream_hash, stream_info in stream_info_db.RangeIter():
        key, name, suggested_file_name = json.loads(stream_info)
        c.execute("insert into lbry_files values (?, ?, ?, ?)",
                  (stream_hash, key, name, suggested_file_name))
    db_conn.commit()
    c = db_conn.cursor()
    for blob_hash_stream_hash, blob_info in stream_blob_db.RangeIter():
        b_h, s_h = json.loads(blob_hash_stream_hash)
        position, iv, length = json.loads(blob_info)
        c.execute("insert into lbry_file_blobs values (?, ?, ?, ?, ?)",
                  (b_h, s_h, position, iv, length))
    db_conn.commit()
    c = db_conn.cursor()
    for sd_blob_hash, stream_hash in stream_desc_db.RangeIter():
        c.execute("insert into lbry_file_descriptors values (?, ?)",
                  (sd_blob_hash, stream_hash))
    db_conn.commit()
    db_conn.close()


def migrate_livestream_db(old_db_dir, new_db_dir):
    old_db_path = os.path.join(old_db_dir, "stream_info.db")
    if not os.path.exists(old_db_path):
        return True
    stream_info_db = leveldb.LevelDB(os.path.join(old_db_dir, "stream_info.db"))
    stream_blob_db = leveldb.LevelDB(os.path.join(old_db_dir, "stream_blob.db"))
    stream_desc_db = leveldb.LevelDB(os.path.join(old_db_dir, "stream_desc.db"))

    db_conn = sqlite3.connect(os.path.join(new_db_dir, "live_stream.db"))

    c = db_conn.cursor()

    c.execute("create table if not exists live_streams (" +
              "    stream_hash text primary key, " +
              "    public_key text, " +
              "    key text, " +
              "    stream_name text, " +
              "    next_announce_time real" +
              ")")
    c.execute("create table if not exists live_stream_blobs (" +
              "    blob_hash text, " +
              "    stream_hash text, " +
              "    position integer, " +
              "    revision integer, " +
              "    iv text, " +
              "    length integer, " +
              "    signature text, " +
              "    foreign key(stream_hash) references live_streams(stream_hash)" +
              ")")
    c.execute("create table if not exists live_stream_descriptors (" +
              "    sd_blob_hash TEXT PRIMARY KEY, " +
              "    stream_hash TEXT, " +
              "    foreign key(stream_hash) references live_streams(stream_hash)" +
              ")")

    db_conn.commit()

    c = db_conn.cursor()
    for stream_hash, stream_info in stream_info_db.RangeIter():
        public_key, key, name, next_announce_time = json.loads(stream_info)
        c.execute("insert into live_streams values (?, ?, ?, ?, ?)",
                  (stream_hash, public_key, key, name, next_announce_time))
    db_conn.commit()
    c = db_conn.cursor()
    for blob_hash_stream_hash, blob_info in stream_blob_db.RangeIter():
        b_h, s_h = json.loads(blob_hash_stream_hash)
        position, revision, iv, length, signature = json.loads(blob_info)
        c.execute("insert into live_stream_blobs values (?, ?, ?, ?, ?, ?, ?)",
                  (b_h, s_h, position, revision, iv, length, signature))
    db_conn.commit()
    c = db_conn.cursor()
    for sd_blob_hash, stream_hash in stream_desc_db.RangeIter():
        c.execute("insert into live_stream_descriptors values (?, ?)",
                  (sd_blob_hash, stream_hash))
    db_conn.commit()
    db_conn.close()


def migrate_ptc_db(old_db_dir, new_db_dir):
    old_db_path = os.path.join(old_db_dir, "ptcwallet.db")
    if not os.path.exists(old_db_path):
        return True
    old_db = leveldb.LevelDB(old_db_path)
    try:
        p_key = old_db.Get("private_key")
        new_db = unqlite.UnQLite(os.path.join(new_db_dir, "ptcwallet.db"))
        new_db['private_key'] = p_key
    except KeyError:
        pass


def migrate_lbryfile_manager_db(old_db_dir, new_db_dir):
    old_db_path = os.path.join(old_db_dir, "lbryfiles.db")
    if not os.path.exists(old_db_path):
        return True
    old_db = leveldb.LevelDB(old_db_path)
    new_db = sqlite3.connect(os.path.join(new_db_dir, "lbryfile_info.db"))
    c = new_db.cursor()
    c.execute("create table if not exists lbry_file_options (" +
              "    blob_data_rate real, " +
              "    status text," +
              "    stream_hash text,"
              "    foreign key(stream_hash) references lbry_files(stream_hash)" +
              ")")
    new_db.commit()
    LBRYFILE_STATUS = "t"
    LBRYFILE_OPTIONS = "o"
    c = new_db.cursor()
    for k, v in old_db.RangeIter():
        key_type, stream_hash = json.loads(k)
        if key_type == LBRYFILE_STATUS:
            try:
                rate = json.loads(old_db.Get(json.dumps((LBRYFILE_OPTIONS, stream_hash))))[0]
            except KeyError:
                rate = None
            c.execute("insert into lbry_file_options values (?, ?, ?)",
                      (rate, v, stream_hash))
    new_db.commit()
    new_db.close()


def migrate_settings_db(old_db_dir, new_db_dir):
    old_settings_db_path = os.path.join(old_db_dir, "settings.db")
    if not os.path.exists(old_settings_db_path):
        return True
    old_db = leveldb.LevelDB(old_settings_db_path)
    new_db = unqlite.UnQLite(os.path.join(new_db_dir, "settings.db"))
    for k, v in old_db.RangeIter():
        new_db[k] = v


def migrate_repeater_db(old_db_dir, new_db_dir):
    old_repeater_db_path = os.path.join(old_db_dir, "valuable_blobs.db")
    if not os.path.exists(old_repeater_db_path):
        return True
    old_db = leveldb.LevelDB(old_repeater_db_path)
    info_db = sqlite3.connect(os.path.join(new_db_dir, "blind_info.db"))
    peer_db = sqlite3.connect(os.path.join(new_db_dir, "blind_peers.db"))
    unql_db = unqlite.UnQLite(os.path.join(new_db_dir, "blind_settings.db"))
    BLOB_INFO_TYPE = 'b'
    SETTING_TYPE = 's'
    PEER_TYPE = 'p'
    info_c = info_db.cursor()
    info_c.execute("create table if not exists valuable_blobs (" +
                   "    blob_hash text primary key, " +
                   "    blob_length integer, " +
                   "    reference text, " +
                   "    peer_host text, " +
                   "    peer_port integer, " +
                   "    peer_score text" +
                   ")")
    info_db.commit()
    peer_c = peer_db.cursor()
    peer_c.execute("create table if not exists approved_peers (" +
                   "    ip_address text, " +
                   "    port integer" +
                   ")")
    peer_db.commit()
    info_c = info_db.cursor()
    peer_c = peer_db.cursor()
    for k, v in old_db.RangeIter():
        key_type, key_rest = json.loads(k)
        if key_type == PEER_TYPE:
            host, port = key_rest
            peer_c.execute("insert into approved_peers values (?, ?)",
                           (host, port))
        elif key_type == SETTING_TYPE:
            unql_db[key_rest] = v
        elif key_type == BLOB_INFO_TYPE:
            blob_hash = key_rest
            length, reference, peer_host, peer_port, peer_score = json.loads(v)
            info_c.execute("insert into valuable_blobs values (?, ?, ?, ?, ?, ?)",
                           (blob_hash, length, reference, peer_host, peer_port, peer_score))
    info_db.commit()
    peer_db.commit()
    info_db.close()
    peer_db.close()
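For reference, do_migration(db_dir) is the module's entry point and returns the backup directory it creates. A minimal sketch of how it would have been driven by hand before this commit disabled the module; the module name dbmigrator and the data-directory path are illustrative assumptions, not taken from this diff:

import logging
import os

import dbmigrator  # hypothetical name for the module shown above

logging.basicConfig(level=logging.INFO)

db_dir = os.path.expanduser("~/.lbrynet")     # illustrative data directory
backup_dir = dbmigrator.do_migration(db_dir)  # moves leveldb files aside, writes the sqlite/unqlite replacements
print("old leveldb databases preserved in", backup_dir)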
@@ -12,7 +12,7 @@ log = logging.getLogger(__name__)
 
 
 class DBLBRYFileMetadataManager(object):
-    """Store and provide access to LBRY file metadata using leveldb files"""
+    """Store and provide access to LBRY file metadata using sqlite"""
 
     def __init__(self, db_dir):
         self.db_dir = db_dir
@@ -167,7 +167,7 @@ class LBRYConsole():
             log.debug("Created the blobfile directory: %s", str(self.blobfile_dir))
 
     def _check_db_migration(self):
-        old_revision = 0
+        old_revision = 1
         db_revision_file = os.path.join(self.db_dir, "db_revision")
         if os.path.exists(db_revision_file):
             old_revision = int(open(db_revision_file).read().strip())
@@ -952,7 +952,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
             log.debug("Created the blobfile directory: %s", str(self.blobfile_dir))
 
     def _check_db_migration(self):
-        old_revision = 0
+        old_revision = 1
         db_revision_file = os.path.join(self.db_dir, "db_revision")
         if os.path.exists(db_revision_file):
             old_revision = int(open(db_revision_file).read().strip())
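Both hunks make the same change: a data directory with no db_revision file now defaults to revision 1 instead of 0, so fresh installs are treated as already migrated and the disabled leveldb migration is never triggered. A simplified, standalone sketch of the check these hunks modify, assuming revision 1 is the current schema (the real code is a method on LBRYConsole and LBRYDaemon):

import os

def needs_migration(db_dir, current_revision=1):
    # After this commit a missing db_revision file means "already at revision 1",
    # so only directories that explicitly record an older revision would migrate.
    old_revision = 1
    db_revision_file = os.path.join(db_dir, "db_revision")
    if os.path.exists(db_revision_file):
        old_revision = int(open(db_revision_file).read().strip())
    return old_revision < current_revision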
@@ -289,16 +289,17 @@ if __name__ == '__main__':
     lbry_daemon = JSONRPCProxy.from_url(API_CONNECTION_STRING)
 
     try:
-        started_daemon = lbry_daemon.is_running()
+        daemon_running = lbry_daemon.is_running()
+        start_daemon = False
     except:
-        started_daemon = False
+        start_daemon = True
 
     try:
         lbry_name = LBRYURIHandler.parse_name(sys.argv[1])
     except IndexError:
         lbry_name = None
 
-    if started_daemon:
-        LBRYURIHandler.open_address(lbry_name)
-    else:
+    if start_daemon:
         main(lbry_name)
+    else:
+        LBRYURIHandler.open_address(lbry_name)

setup.py
@@ -1,51 +1,301 @@
This hunk replaces the original 51-line setup.py with a 301-line, platform-aware version; removed and added lines are shown interleaved, so the old flat console_scripts and requires lists (the latter still pinning leveldb) and the old setup() call appear alongside their replacements:

#!/usr/bin/env python

from lbrynet import __version__

import ez_setup
ez_setup.use_setuptools()
import sys
import os
from setuptools import setup, find_packages
from lbrynet import __version__

base_dir = os.path.abspath(os.path.dirname(__file__))


console_scripts = ['lbrynet-stdin-uploader = lbrynet.lbrynet_console.LBRYStdinUploader:launch_stdin_uploader',
                   'lbrynet-stdout-downloader = lbrynet.lbrynet_console.LBRYStdoutDownloader:launch_stdout_downloader',
                   'lbrynet-create-network = lbrynet.create_network:main',
                   'lbrynet-launch-node = lbrynet.dht.node:main',
                   'lbrynet-launch-rpc-node = lbrynet.rpc_node:main',
                   'lbrynet-rpc-node-cli = lbrynet.node_rpc_cli:main',
                   'lbrynet-lookup-hosts-for-hash = lbrynet.dht_scripts:get_hosts_for_hash_in_dht',
                   'lbrynet-announce_hash_to_dht = lbrynet.dht_scripts:announce_hash_to_dht',
                   'lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:start',
                   'stop-lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:stop',
                   'lbrynet-cli = lbrynet.lbrynet_daemon.LBRYDaemonCLI:main']

requires = ['pycrypto', 'twisted', 'miniupnpc', 'yapsy', 'seccure',
            'python-bitcoinrpc==0.1', 'txJSON-RPC', 'requests>=2.4.2', 'unqlite==0.2.0',
            'leveldb', 'lbryum', 'jsonrpc', 'simplejson', 'appdirs', 'six==1.9.0', 'base58', 'googlefinance',
            'requests_futures']

LINUX = 1
DARWIN = 2
WINDOWS = 3

if sys.platform.startswith("linux"):
    platform = LINUX
elif sys.platform.startswith("darwin"):
    platform = DARWIN
elif sys.platform.startswith("win"):
    platform = WINDOWS
else:
    raise Exception("Unknown os: %s" % sys.platform)

base_dir = os.path.abspath(os.path.dirname(__file__))
package_name = "lbrynet"
dist_name = "LBRY"
description = "A decentralized media library and marketplace"
author = "LBRY, Inc"
url = "lbry.io"
maintainer = "Jack Robison"
maintainer_email = "jack@lbry.io"
keywords = "LBRY"

requires = [
    'pycrypto',
    'twisted',
    'miniupnpc',
    'yapsy',
    'seccure',
    'python-bitcoinrpc==0.1',
    'txJSON-RPC',
    'requests>=2.4.2',
    'unqlite==0.2.0',
    'lbryum',
    'jsonrpc',
    'simplejson',
    'appdirs',
    'six==1.9.0',
    'base58',
    'googlefinance',
    'requests_futures'
]

console_scripts = [
    # 'lbrynet-stdin-uploader = lbrynet.lbrynet_console.LBRYStdinUploader:launch_stdin_uploader',
    # 'lbrynet-stdout-downloader = lbrynet.lbrynet_console.LBRYStdoutDownloader:launch_stdout_downloader',
    # 'lbrynet-create-network = lbrynet.create_network:main',
    # 'lbrynet-launch-node = lbrynet.dht.node:main',
    # 'lbrynet-launch-rpc-node = lbrynet.rpc_node:main',
    # 'lbrynet-rpc-node-cli = lbrynet.node_rpc_cli:main',
    # 'lbrynet-lookup-hosts-for-hash = lbrynet.dht_scripts:get_hosts_for_hash_in_dht',
    # 'lbrynet-announce_hash_to_dht = lbrynet.dht_scripts:announce_hash_to_dht',
    'lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:start',
    'stop-lbrynet-daemon = lbrynet.lbrynet_daemon.LBRYDaemonControl:stop',
    'lbrynet-cli = lbrynet.lbrynet_daemon.LBRYDaemonCLI:main'
]

if platform == LINUX:
    import ez_setup
    ez_setup.use_setuptools()
    from setuptools import setup, find_packages

    requires.append('service-identity')

    setup(name='lbrynet',
          description='A decentralized media library and marketplace',
          version=__version__,
          maintainer='Alex Grintsvayg',
          maintainer_email='grin@lbry.io',
          packages=find_packages(base_dir),
          install_requires=requires,
          entry_points={'console_scripts': console_scripts},
          data_files=[
              ('lbrynet/lbrynet_console/plugins',
               [
                   os.path.join(base_dir, 'lbrynet', 'lbrynet_console', 'plugins',
                                'blindrepeater.yapsy-plugin')
               ]
               ),
          ],
          dependency_links=['https://github.com/lbryio/lbryum/tarball/master/#egg=lbryum'],
          )
    setup(name=package_name,
          description=description,
          version=__version__,
          maintainer=maintainer,
          maintainer_email=maintainer_email,
          url=url,
          author=author,
          keywords=keywords,
          packages=find_packages(base_dir),
          install_requires=requires,
          entry_points={'console_scripts': console_scripts},
          data_files=[
              ('lbrynet/lbrynet_console/plugins',
               [
                   os.path.join(base_dir, 'lbrynet', 'lbrynet_console', 'plugins',
                                'blindrepeater.yapsy-plugin')
               ]
               ),
          ],
          dependency_links=['https://github.com/lbryio/lbryum/tarball/master/#egg=lbryum'],
          )

elif platform == DARWIN:
    import ez_setup

    ez_setup.use_setuptools()
    from setuptools import setup, find_packages

    setup(name=package_name,
          description=description,
          version=__version__,
          maintainer=maintainer,
          maintainer_email=maintainer_email,
          url=url,
          author=author,
          keywords=keywords,
          packages=find_packages(base_dir),
          install_requires=requires,
          entry_points={'console_scripts': console_scripts},
          data_files=[
              ('lbrynet/lbrynet_console/plugins',
               [
                   os.path.join(base_dir, 'lbrynet', 'lbrynet_console', 'plugins',
                                'blindrepeater.yapsy-plugin')
               ]
               ),
          ],
          dependency_links=['https://github.com/lbryio/lbryum/tarball/master/#egg=lbryum'],
          )

elif platform == WINDOWS:
    import opcode
    import pkg_resources
    from cx_Freeze import setup, Executable
    import requests.certs

    win_icon = os.path.join('packaging', 'windows', 'lbry-win32-app', 'icons', 'lbry256.ico')
    wordlist_path = pkg_resources.resource_filename('lbryum', 'wordlist')

    # Allow virtualenv to find distutils of base python installation
    distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')

    def find_data_file(filename):
        if getattr(sys, 'frozen', False):
            # The application is frozen
            data_dir = os.path.dirname(sys.executable)
        else:
            # The application is not frozen
            # Change this bit to match where you store your data files:
            data_dir = os.path.dirname(__file__)
        return os.path.join(data_dir, filename)

    if os.path.isdir("C:\Program Files (x86)"):
        shortcut_icon = 'C:\Program Files (x86)\lbrynet\icons\lbry256.ico'
    else:
        shortcut_icon = 'C:\Program Files\lbrynet\icons\lbry256.ico'

    shortcut_table = [
        ('DesktopShortcut',  # Shortcut
         'DesktopFolder',  # Directory
         'lbrynet-daemon',  # Name
         'TARGETDIR',  # Component
         '[TARGETDIR]\lbrynet-daemon.exe',  # Target
         '--log-to-console',  # Arguments
         description,  # Description
         None,  # Hotkey
         shortcut_icon,  # Icon
         None,  # IconIndex
         None,  # ShowCmd
         'TARGETDIR',  # WkDir
         ),
        ('DaemonShortcut',  # Shortcut
         'DesktopFolder',  # Directory
         'LBRY',  # Name
         'TARGETDIR',  # Component
         '[TARGETDIR]\{0}.exe'.format(dist_name),  # Target
         None,  # Arguments
         description,  # Description
         None,  # Hotkey
         shortcut_icon,  # Icon
         None,  # IconIndex
         None,  # ShowCmd
         'TARGETDIR',  # WkDir
         ),
        ('DaemonCLIShortcut',  # Shortcut
         'DesktopFolder',  # Directory
         'lbrynet-cli',  # Name
         'TARGETDIR',  # Component
         '[TARGETDIR]\lbrynet-cli.exe',  # Target
         None,  # Arguments
         description,  # Description
         None,  # Hotkey
         shortcut_icon,  # Icon
         None,  # IconIndex
         None,  # ShowCmd
         'TARGETDIR',  # WkDir
         ),
    ]

    msi_data = {"Shortcut": shortcut_table}

    bdist_msi_options = {
        'upgrade_code': '{18c0e933-ad08-44e8-a413-1d0ed624c100}',
        'add_to_path': True,
        # Default install path is 'C:\Program Files\' for 32-bit or 'C:\Program Files (x86)\' for 64-bit
        # 'initial_target_dir': r'[LocalAppDataFolder]\{0}'.format(name),
        'data': msi_data
    }

    build_exe_options = {
        'include_msvcr': True,
        'includes': [],
        'packages': ['cython',
                     'twisted',
                     'yapsy',
                     'appdirs',
                     'argparse',
                     'base58',
                     'colorama',
                     'cx_Freeze',
                     'dns',
                     'ecdsa',
                     'gmpy',
                     'googlefinance',
                     'jsonrpc',
                     'jsonrpclib',
                     'lbryum',
                     'loggly',
                     'miniupnpc',
                     'pbkdf2',
                     'google.protobuf',
                     'Crypto',
                     'bitcoinrpc',
                     'win32api',
                     'qrcode',
                     'requests',
                     'requests_futures',
                     'seccure',
                     'simplejson',
                     'six',
                     'aes',
                     'txjsonrpc',
                     'unqlite',
                     'wsgiref',
                     'zope.interface',
                     'os',
                     'pkg_resources'
                     ],
        'excludes': ['distutils', 'collections.sys', 'collections._weakref', 'collections.abc',
                     'Tkinter', 'tk', 'tcl', 'PyQt4', 'nose', 'mock'
                     'zope.interface._zope_interface_coptimizations', 'leveldb'],
        'include_files': [(distutils_path, 'distutils'), (requests.certs.where(), 'cacert.pem'),
                          (os.path.join('packaging', 'windows', 'lbry-win32-app', 'icons', 'lbry16.ico'),
                           os.path.join('icons', 'lbry16.ico')),
                          (os.path.join('packaging', 'windows', 'lbry-win32-app', 'icons', 'lbry256.ico'),
                           os.path.join('icons', 'lbry256.ico')),
                          (os.path.join(wordlist_path, 'chinese_simplified.txt'),
                           os.path.join('wordlist', 'chinese_simplified.txt')),
                          (os.path.join(wordlist_path, 'english.txt'), os.path.join('wordlist', 'english.txt')),
                          (os.path.join(wordlist_path, 'japanese.txt'), os.path.join('wordlist', 'japanese.txt')),
                          (os.path.join(wordlist_path, 'portuguese.txt'), os.path.join('wordlist', 'portuguese.txt')),
                          (os.path.join(wordlist_path, 'spanish.txt'), os.path.join('wordlist', 'spanish.txt'))
                          ],
        'namespace_packages': ['zope', 'google']}

    tray_app = Executable(
        script=os.path.join('packaging', 'windows', 'lbry-win32-app', 'LBRYWin32App.py'),
        base='Win32GUI',
        icon=win_icon,
        compress=True,
        shortcutName=dist_name,
        shortcutDir='DesktopFolder',
        targetName='{0}.exe'.format(dist_name)
        # targetDir="LocalAppDataFolder"
    )

    daemon_dir = os.path.join('lbrynet', 'lbrynet_daemon')
    daemon_exe = Executable(
        script=os.path.join(daemon_dir, 'LBRYDaemonControl.py'),
        icon=win_icon,
        shortcutName="lbrynet-daemon",
        shortcutDir='DesktopFolder',
        targetName='lbrynet-daemon.exe'
    )

    cli_exe = Executable(
        script=os.path.join(daemon_dir, 'LBRYDaemonCLI.py'),
        icon=win_icon,
        shortcutName="lbrynet-cli",
        shortcutDir='DesktopFolder',
        targetName='lbrynet-cli.exe'
    )

    setup(
        name=package_name,
        description=description,
        version=__version__,
        maintainer=maintainer,
        maintainer_email=maintainer_email,
        url=url,
        author=author,
        keywords=keywords,
        data_files=[],
        options={'build_exe': build_exe_options,
                 'bdist_msi': bdist_msi_options},
        executables=[
            tray_app,
            daemon_exe,
            cli_exe
        ],
    )
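The Windows branch defines find_data_file() so that code running from a frozen cx_Freeze executable can locate files shipped through 'include_files' (cacert.pem, the icons, the lbryum wordlists) next to the .exe, while the same code run from a source checkout resolves them relative to the module. An illustrative use of that helper; the call sites below are assumptions, not part of this diff:

import os
import sys

def find_data_file(filename):
    # Mirrors the helper defined in the Windows branch of setup.py above.
    if getattr(sys, 'frozen', False):
        data_dir = os.path.dirname(sys.executable)  # frozen: files sit next to the executable
    else:
        data_dir = os.path.dirname(__file__)        # source checkout: files sit next to this module
    return os.path.join(data_dir, filename)

cert_path = find_data_file('cacert.pem')                          # bundled via include_files
icon_path = find_data_file(os.path.join('icons', 'lbry256.ico'))  # bundled via include_files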