remove unused files
parent 986fcd1660
commit 00edd3cb66
12 changed files with 2 additions and 641 deletions
@@ -1,20 +0,0 @@
[bumpversion]
current_version = 0.15.0
commit = True
tag = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)((?P<release>[a-z]+)(?P<candidate>\d+))?
serialize =
    {major}.{minor}.{patch}{release}{candidate}
    {major}.{minor}.{patch}

[bumpversion:part:candidate]
first_value = 1

[bumpversion:part:release]
optional_value = production
values =
    rc
    production

[bumpversion:file:lbrynet/__init__.py]
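For orientation: the parse regex and the two serialize templates in this removed config describe how a version string such as 0.15.0 or 0.15.0rc1 is split into parts and re-assembled. A small hand-rolled sketch of that round trip in Python, illustrative only (this is not how bumpversion itself is invoked):

import re

# Regex copied verbatim from the removed config above.
PARSE = re.compile(
    r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
    r"((?P<release>[a-z]+)(?P<candidate>\d+))?"
)

def serialize(parts):
    # The first serialize template applies when the optional release/candidate
    # parts are present; otherwise fall back to plain major.minor.patch.
    if parts["release"] is not None:
        return "{major}.{minor}.{patch}{release}{candidate}".format(**parts)
    return "{major}.{minor}.{patch}".format(**parts)

for version in ("0.15.0", "0.15.0rc1"):
    parts = PARSE.match(version).groupdict()
    assert serialize(parts) == version  # both serialize forms round-trip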
0 .gitmodules (vendored)
@@ -32,7 +32,8 @@ at anytime.

 ### Removed
 * Removed claim related filter arguments `name`, `claim_id`, and `outpoint` from `file_list`, `file_delete`, `file_set_status`, and `file_reflect`
-*
+* Removed unused files
+* Removed old and unused UI related code

 ## [0.18.0] - 2017-11-08

BIN app.icns
Binary file not shown.
332 ez_setup.py
@@ -1,332 +0,0 @@
#!/usr/bin/env python
"""Bootstrap setuptools installation

To use setuptools in your package's setup.py, include this
file in the same directory and add this to the top of your setup.py::

    from ez_setup import use_setuptools
    use_setuptools()

To require a specific version of setuptools, set a download
mirror, or use an alternate download directory, simply supply
the appropriate options to ``use_setuptools()``.

This file can also be run as a script to install or upgrade setuptools.
"""
import os
import shutil
import sys
import tempfile
import zipfile
import optparse
import subprocess
import platform
import textwrap
import contextlib

from distutils import log

try:
    from urllib.request import urlopen
except ImportError:
    from urllib2 import urlopen

try:
    from site import USER_SITE
except ImportError:
    USER_SITE = None

DEFAULT_VERSION = "4.0.1"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"

def _python_cmd(*args):
    """
    Return True if the command succeeded.
    """
    args = (sys.executable,) + args
    return subprocess.call(args) == 0


def _install(archive_filename, install_args=()):
    with archive_context(archive_filename):
        # installing
        log.warn('Installing Setuptools')
        if not _python_cmd('setup.py', 'install', *install_args):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
            # exitcode will be 2
            return 2


def _build_egg(egg, archive_filename, to_dir):
    with archive_context(archive_filename):
        # building an egg
        log.warn('Building a Setuptools egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')


class ContextualZipFile(zipfile.ZipFile):
    """
    Supplement ZipFile class to support context manager for Python 2.6
    """

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def __new__(cls, *args, **kwargs):
        """
        Construct a ZipFile or ContextualZipFile as appropriate
        """
        if hasattr(zipfile.ZipFile, '__exit__'):
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)


@contextlib.contextmanager
def archive_context(filename):
    # extracting the archive
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        with ContextualZipFile(filename) as archive:
            archive.extractall()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)
        yield

    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)


def _do_download(version, download_base, to_dir, download_delay):
    egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
                       % (version, sys.version_info[0], sys.version_info[1]))
    if not os.path.exists(egg):
        archive = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, archive, to_dir)
    sys.path.insert(0, egg)

    # Remove previously-imported pkg_resources if present (see
    # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
    if 'pkg_resources' in sys.modules:
        del sys.modules['pkg_resources']

    import setuptools
    setuptools.bootstrap_install_from = egg


def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
        to_dir=os.curdir, download_delay=15):
    to_dir = os.path.abspath(to_dir)
    rep_modules = 'pkg_resources', 'setuptools'
    imported = set(sys.modules).intersection(rep_modules)
    try:
        import pkg_resources
    except ImportError:
        return _do_download(version, download_base, to_dir, download_delay)
    try:
        pkg_resources.require("setuptools>=" + version)
        return
    except pkg_resources.DistributionNotFound:
        return _do_download(version, download_base, to_dir, download_delay)
    except pkg_resources.VersionConflict as VC_err:
        if imported:
            msg = textwrap.dedent("""
                The required version of setuptools (>={version}) is not available,
                and can't be installed while this script is running. Please
                install a more recent version first, using
                'easy_install -U setuptools'.

                (Currently using {VC_err.args[0]!r})
                """).format(VC_err=VC_err, version=version)
            sys.stderr.write(msg)
            sys.exit(2)

        # otherwise, reload ok
        del pkg_resources, sys.modules['pkg_resources']
        return _do_download(version, download_base, to_dir, download_delay)

def _clean_check(cmd, target):
    """
    Run the command to download target. If the command fails, clean up before
    re-raising the error.
    """
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError:
        if os.access(target, os.F_OK):
            os.unlink(target)
        raise

def download_file_powershell(url, target):
    """
    Download the file at url to target using Powershell (which will validate
    trust). Raise an exception if the command cannot complete.
    """
    target = os.path.abspath(target)
    ps_cmd = (
        "[System.Net.WebRequest]::DefaultWebProxy.Credentials = "
        "[System.Net.CredentialCache]::DefaultCredentials; "
        "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)"
        % vars()
    )
    cmd = [
        'powershell',
        '-Command',
        ps_cmd,
    ]
    _clean_check(cmd, target)

def has_powershell():
    if platform.system() != 'Windows':
        return False
    cmd = ['powershell', '-Command', 'echo test']
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True

download_file_powershell.viable = has_powershell

def download_file_curl(url, target):
    cmd = ['curl', url, '--silent', '--output', target]
    _clean_check(cmd, target)

def has_curl():
    cmd = ['curl', '--version']
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True

download_file_curl.viable = has_curl

def download_file_wget(url, target):
    cmd = ['wget', url, '--quiet', '--output-document', target]
    _clean_check(cmd, target)

def has_wget():
    cmd = ['wget', '--version']
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True

download_file_wget.viable = has_wget

def download_file_insecure(url, target):
    """
    Use Python to download the file, even though it cannot authenticate the
    connection.
    """
    src = urlopen(url)
    try:
        # Read all the data in one block.
        data = src.read()
    finally:
        src.close()

    # Write all the data in one block to avoid creating a partial file.
    with open(target, "wb") as dst:
        dst.write(data)

download_file_insecure.viable = lambda: True

def get_best_downloader():
    downloaders = (
        download_file_powershell,
        download_file_curl,
        download_file_wget,
        download_file_insecure,
    )
    viable_downloaders = (dl for dl in downloaders if dl.viable())
    return next(viable_downloaders, None)

def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
        to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader):
    """
    Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.

    ``downloader_factory`` should be a function taking no arguments and
    returning a function for downloading a URL to a target.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    zip_name = "setuptools-%s.zip" % version
    url = download_base + zip_name
    saveto = os.path.join(to_dir, zip_name)
    if not os.path.exists(saveto):  # Avoid repeated downloads
        log.warn("Downloading %s", url)
        downloader = downloader_factory()
        downloader(url, saveto)
    return os.path.realpath(saveto)

def _build_install_args(options):
    """
    Build the arguments to 'python setup.py install' on the setuptools package
    """
    return ['--user'] if options.user_install else []

def _parse_args():
    """
    Parse the command line for options
    """
    parser = optparse.OptionParser()
    parser.add_option(
        '--user', dest='user_install', action='store_true', default=False,
        help='install in user site package (requires Python 2.6 or later)')
    parser.add_option(
        '--download-base', dest='download_base', metavar="URL",
        default=DEFAULT_URL,
        help='alternative URL from where to download the setuptools package')
    parser.add_option(
        '--insecure', dest='downloader_factory', action='store_const',
        const=lambda: download_file_insecure, default=get_best_downloader,
        help='Use internal, non-validating downloader'
    )
    parser.add_option(
        '--version', help="Specify which version to download",
        default=DEFAULT_VERSION,
    )
    options, args = parser.parse_args()
    # positional arguments are ignored
    return options

def main():
    """Install or upgrade setuptools and EasyInstall"""
    options = _parse_args()
    archive = download_setuptools(
        version=options.version,
        download_base=options.download_base,
        downloader_factory=options.downloader_factory,
    )
    return _install(archive, _build_install_args(options))

if __name__ == '__main__':
    sys.exit(main())
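The module docstring above already shows the intended bootstrap pattern; purely for orientation, here is a minimal sketch of a setup.py that relies on it (the package metadata is made up for illustration, not taken from this repository):

# ez_setup.py is expected to sit next to setup.py.
from ez_setup import use_setuptools
use_setuptools()  # makes sure a recent-enough setuptools is importable

from setuptools import setup, find_packages

setup(
    name="example-package",   # hypothetical metadata, for illustration only
    version="0.1.0",
    packages=find_packages(),
)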
@@ -1,185 +0,0 @@
import time
import cgi
import mimetools
import os
import tempfile
from twisted.web import server


class DaemonRequest(server.Request):
    """
    For LBRY specific request functionality. Currently just provides
    handling for large multipart POST requests, taken from here:
    http://sammitch.ca/2013/07/handling-large-requests-in-twisted/

    For multipart POST requests, this populates self.args with temp
    file objects instead of strings. Note that these files don't auto-delete
    on close because we want to be able to move and rename them.

    """

    # max amount of memory to allow any ~single~ request argument [ie: POSTed file]
    # note: this value seems to be taken with a grain of salt, memory usage may spike
    # FAR above this value in some cases.
    # eg: set the memory limit to 5 MB, write 2 blocks of 4MB, mem usage will
    # have spiked to 8MB before the data is rolled to disk after the
    # second write completes.
    memorylimit = 1024*1024*100

    # enable/disable debug logging
    do_log = False

    # re-defined only for debug/logging purposes
    def gotLength(self, length):
        if self.do_log:
            print '%f Headers received, Content-Length: %d' % (time.time(), length)
        server.Request.gotLength(self, length)

    # re-definition of twisted.web.server.Request.requestreceived, the only difference
    # is that self.parse_multipart() is used rather than cgi.parse_multipart()
    def requestReceived(self, command, path, version):
        from twisted.web.http import parse_qs
        if self.do_log:
            print '%f Request Received' % time.time()

        self.content.seek(0, 0)
        self.args = {}
        self.stack = []

        self.method, self.uri = command, path
        self.clientproto = version
        x = self.uri.split(b'?', 1)

        if len(x) == 1:
            self.path = self.uri
        else:
            self.path, argstring = x
            self.args = parse_qs(argstring, 1)

        # cache the client and server information, we'll need this later to be
        # serialized and sent with the request so CGIs will work remotely
        self.client = self.channel.transport.getPeer()
        self.host = self.channel.transport.getHost()

        # Argument processing
        args = self.args
        ctype = self.requestHeaders.getRawHeaders(b'content-type')
        if ctype is not None:
            ctype = ctype[0]

        if self.method == b"POST" and ctype:
            mfd = b'multipart/form-data'
            key, pdict = cgi.parse_header(ctype)
            if key == b'application/x-www-form-urlencoded':
                args.update(parse_qs(self.content.read(), 1))
            elif key == mfd:
                try:
                    self.content.seek(0, 0)
                    args.update(self.parse_multipart(self.content, pdict))

                except KeyError as e:
                    if e.args[0] == b'content-disposition':
                        # Parse_multipart can't cope with missing
                        # content-dispostion headers in multipart/form-data
                        # parts, so we catch the exception and tell the client
                        # it was a bad request.
                        self.channel.transport.write(
                            b"HTTP/1.1 400 Bad Request\r\n\r\n")
                        self.channel.transport.loseConnection()
                        return
                    raise

        self.content.seek(0, 0)

        self.process()

    # re-definition of cgi.parse_multipart that uses a single temporary file to store
    # data rather than storing 2 to 3 copies in various lists.
    def parse_multipart(self, fp, pdict):
        if self.do_log:
            print '%f Parsing Multipart data: ' % time.time()
        rewind = fp.tell()  # save cursor
        fp.seek(0, 0)  # reset cursor

        boundary = ""
        if 'boundary' in pdict:
            boundary = pdict['boundary']
        if not cgi.valid_boundary(boundary):
            raise ValueError('Invalid boundary in multipart form: %r' % (boundary,))

        nextpart = "--" + boundary
        lastpart = "--" + boundary + "--"
        partdict = {}
        terminator = ""

        while terminator != lastpart:
            c_bytes = -1

            data = tempfile.NamedTemporaryFile(delete=False)
            if terminator:
                # At start of next part. Read headers first.
                headers = mimetools.Message(fp)
                clength = headers.getheader('content-length')
                if clength:
                    try:
                        c_bytes = int(clength)
                    except ValueError:
                        pass
                if c_bytes > 0:
                    data.write(fp.read(c_bytes))
            # Read lines until end of part.
            while 1:
                line = fp.readline()
                if not line:
                    terminator = lastpart  # End outer loop
                    break
                if line[:2] == "--":
                    terminator = line.strip()
                    if terminator in (nextpart, lastpart):
                        break
                data.write(line)
            # Done with part.
            if data.tell() == 0:
                continue
            if c_bytes < 0:
                # if a Content-Length header was not supplied with the MIME part
                # then the trailing line break must be removed.
                # we have data, read the last 2 bytes
                rewind = min(2, data.tell())
                data.seek(-rewind, os.SEEK_END)
                line = data.read(2)
                if line[-2:] == "\r\n":
                    data.seek(-2, os.SEEK_END)
                    data.truncate()
                elif line[-1:] == "\n":
                    data.seek(-1, os.SEEK_END)
                    data.truncate()

            line = headers['content-disposition']
            if not line:
                continue
            key, params = cgi.parse_header(line)
            if key != 'form-data':
                continue
            if 'name' in params:
                name = params['name']
                # kludge in the filename
                if 'filename' in params:
                    fname_index = name + '_filename'
                    if fname_index in partdict:
                        partdict[fname_index].append(params['filename'])
                    else:
                        partdict[fname_index] = [params['filename']]
            else:
                # Unnamed parts are not returned at all.
                continue
            data.seek(0, 0)
            if name in partdict:
                partdict[name].append(data)
            else:
                partdict[name] = [data]

        fp.seek(rewind)  # Restore cursor
        return partdict
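The class above (Python 2 code) was plugged into twisted.web as the site's request factory; the DaemonServer hunks below show the actual one-line wiring being removed. A schematic sketch of that pattern, with a stand-in resource (Ping is hypothetical; the import path is the one used in the hunk below and requestFactory is the real twisted.web.server.Site attribute):

from twisted.web import server, resource
from twisted.internet import reactor

from lbrynet.daemon.DaemonRequest import DaemonRequest  # import path as in the hunk below

class Ping(resource.Resource):
    isLeaf = True

    def render_GET(self, request):
        return b"ok"

site = server.Site(Ping())
site.requestFactory = DaemonRequest  # same pattern as lbrynet_server.requestFactory below
reactor.listenTCP(8080, site)
reactor.run()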
@@ -9,7 +9,6 @@ from lbrynet import conf
 from lbrynet.daemon.Daemon import Daemon
 from lbrynet.daemon.auth.auth import PasswordChecker, HttpPasswordRealm
 from lbrynet.daemon.auth.util import initialize_api_key_file
-from lbrynet.daemon.DaemonRequest import DaemonRequest

 log = logging.getLogger(__name__)

@@ -36,7 +35,6 @@ class DaemonServer(object):
         self.root.putChild(conf.settings['API_ADDRESS'], self._daemon)

         lbrynet_server = get_site_base(use_auth, self.root)
-        lbrynet_server.requestFactory = DaemonRequest

         try:
             self.server_port = reactor.listenTCP(
@@ -1,61 +0,0 @@
import json
import logging.handlers
import os

from twisted.internet.task import LoopingCall
from twisted.internet import reactor
from lbrynet import conf


conf.initialize_settings()
log_dir = conf.settings['data_dir']
LOG_FILENAME = os.path.join(log_dir, 'lbrynet-daemon.log')

if os.path.isfile(LOG_FILENAME):
    f = open(LOG_FILENAME, 'r')
    PREVIOUS_LOG = len(f.read())
    f.close()
else:
    PREVIOUS_LOG = 0

log = logging.getLogger(__name__)
handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, maxBytes=2097152, backupCount=5)
log.addHandler(handler)
log.setLevel(logging.INFO)


class Autofetcher(object):
    """
    Download name claims as they occur
    """

    def __init__(self, api):
        self._api = api
        self._checker = LoopingCall(self._check_for_new_claims)
        self.best_block = None

    def start(self):
        reactor.addSystemEventTrigger('before', 'shutdown', self.stop)
        self._checker.start(5)

    def stop(self):
        log.info("Stopping autofetcher")
        self._checker.stop()

    def _check_for_new_claims(self):
        block = self._api.get_best_blockhash()
        if block != self.best_block:
            log.info("Checking new block for name claims, block hash: %s" % block)
            self.best_block = block
            transactions = self._api.get_block({'blockhash': block})['tx']
            for t in transactions:
                c = self._api.get_claims_for_tx({'txid': t})
                if len(c):
                    for i in c:
                        log.info("Downloading stream for claim txid: %s" % t)
                        self._api.get({'name': t, 'stream_info': json.loads(i['value'])})


def run(api):
    fetcher = Autofetcher(api)
    fetcher.start()
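The autofetcher above is a polling loop built on twisted.internet.task.LoopingCall; a self-contained sketch of that pattern, with a placeholder check function standing in for the daemon API calls:

from twisted.internet import reactor
from twisted.internet.task import LoopingCall

def check():
    # placeholder for per-tick work, e.g. asking the daemon for the best block hash
    print("polling")

poller = LoopingCall(check)
poller.start(5)                     # fires immediately, then every 5 seconds
reactor.callLater(20, poller.stop)  # stop after a few ticks so the example ends
reactor.callLater(21, reactor.stop)
reactor.run()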
@@ -1,37 +0,0 @@
from twisted.internet import defer


class migrator(object):
    """
    Re-resolve lbry names to write missing data to blockchain.db and to cache the nametrie
    """

    def __init__(self, api):
        self._api = api

    def start(self):
        def _resolve_claims(claimtrie):
            claims = [i for i in claimtrie if 'txid' in i.keys()]
            r = defer.DeferredList(
                [self._api._resolve_name(claim['name'], force_refresh=True) for claim in claims],
                consumeErrors=True)
            return r

        def _restart_lbry_files():
            def _restart_lbry_file(lbry_file):
                return lbry_file.restore()

            lbry_files = self._api.lbry_file_manager.lbry_files
            r = defer.DeferredList(
                [_restart_lbry_file(lbry_file) for lbry_file in lbry_files if not lbry_file.txid],
                consumeErrors=True)
            return r

        d = self._api.session.wallet.get_nametrie()
        d.addCallback(_resolve_claims)
        d.addCallback(lambda _: _restart_lbry_files())


def run(api):
    refresher = migrator(api)
    refresher.start()
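The migrator fans its work out with defer.DeferredList and waits for every deferred to settle; a minimal stand-alone sketch of that pattern (the squared numbers are just stand-ins for the per-claim resolutions):

from twisted.internet import defer, reactor

def fan_out():
    work = [defer.succeed(n * n) for n in range(3)]  # three already-fired deferreds
    d = defer.DeferredList(work, consumeErrors=True)
    d.addCallback(lambda results: print(results))    # [(True, 0), (True, 1), (True, 4)]
    d.addCallback(lambda _: reactor.stop())

reactor.callWhenRunning(fan_out)
reactor.run()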
3 setup.py
@@ -65,8 +65,5 @@ setup(
     packages=find_packages(base_dir),
     install_requires=requires,
     entry_points={'console_scripts': console_scripts},
-    package_data={
-        package_name: list(package_files('lbrynet/resources/ui'))
-    },
     zip_safe=False,
 )