forked from LBRYCommunity/lbry-sdk
Merge branch 'remove-unused-files'
This commit is contained in:
commit ade8b64ca7
16 changed files with 3 additions and 1061 deletions
@@ -1,20 +0,0 @@
[bumpversion]
current_version = 0.15.0
commit = True
tag = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)((?P<release>[a-z]+)(?P<candidate>\d+))?
serialize =
    {major}.{minor}.{patch}{release}{candidate}
    {major}.{minor}.{patch}

[bumpversion:part:candidate]
first_value = 1

[bumpversion:part:release]
optional_value = production
values =
    rc
    production

[bumpversion:file:lbrynet/__init__.py]
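As an illustrative aside (not part of the diff), this is how the `parse` pattern from the deleted config above splits a version string; the two version strings are examples only, not values taken from the repository:

    import re

    # Same regex as the deleted `parse =` line above.
    PARSE = re.compile(r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'
                       r'((?P<release>[a-z]+)(?P<candidate>\d+))?')

    print(PARSE.match('0.15.0rc1').groupdict())
    # {'major': '0', 'minor': '15', 'patch': '0', 'release': 'rc', 'candidate': '1'}
    print(PARSE.match('0.15.0').groupdict())
    # {'major': '0', 'minor': '15', 'patch': '0', 'release': None, 'candidate': None}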
0 .gitmodules (vendored)
@@ -32,7 +32,8 @@ at anytime.

### Removed
* Removed claim related filter arguments `name`, `claim_id`, and `outpoint` from `file_list`, `file_delete`, `file_set_status`, and `file_reflect`
*
* Removed unused files
* Removed old and unused UI related code


## [0.18.0] - 2017-11-08
86 FAQ.md
@@ -1,86 +0,0 @@
#### Getting LBRY for development

Q: How do I get lbry for the command line?

A: In order to run lbry from the command line, you need more than the packaged app/deb.

###### On OS X

You can install the LBRY command line by running `curl -sL https://rawgit.com/lbryio/lbry-setup/master/lbry_setup_osx.sh | sudo bash` in a terminal. This script will install lbrynet and its dependencies, as well as the app.

###### On Linux

On Ubuntu or Mint you can install the prerequisites and lbrynet by running

    sudo apt-get install libgmp3-dev build-essential python2.7 python2.7-dev python-pip git
    git clone https://github.com/lbryio/lbry.git
    cd lbry
    sudo python setup.py install

#### Using LBRY

Q: How do I run lbry from the command line?

A: The command is `lbrynet-daemon`

***********

Q: How do I stop lbry from the command line?

A: You can press ctrl-c or run `stop-lbrynet-daemon`

***********

Q: How do I run lbry with lbrycrdd (the blockchain node application)?

A: Start lbry with the --wallet flag set: `lbrynet-daemon --wallet=lbrycrd`

Note: the wallet choice is persistent until you specify another wallet - lbryum - with the --wallet flag again.

***********

Q: Where are all the behind-the-scenes files?

A: On Linux, the relevant directories are `~/.lbrynet`, `~/.lbrycrd`, and `~/.lbryum`, depending on which wallets you've used. On OS X, the folders of interest are `~/Library/Application Support/LBRY`, `~/.lbrycrd` and `~/.lbryum`, also depending on which wallets you've used.

***********

Q: How can I see the log in the console?

A: Run lbry with the --log-to-console flag set: `lbrynet-daemon --log-to-console`

***********

Q: How do I specify a web UI to use?

A: If the files for the UI you'd like to use are stored locally on your computer, start lbry with the --ui flag: `lbrynet-daemon --ui=/full/path/to/ui/files/root/folder`

Note: once set with the --ui flag, the given UI will be cached by lbry and used as the default going forward. Also, it will only successfully load a UI if it contains a conforming requirements.txt file that specifies the required lbrynet and lbryum versions. [Here](https://github.com/lbryio/lbry-web-ui/blob/master/dist/requirements.txt) is an example requirements.txt file.

To reset your UI to pull from lbryio, or to try a UI still in development, run lbry with the --branch flag: `lbrynet-daemon --branch=master`

***********

Q: How do I see the list of API functions I can call, and how do I call them?

A: Here is an example script to get the documentation for the various API calls. To use any of the functions displayed, just provide any specified arguments in a dictionary.

Note: the lbry API can only be used while either the app or the lbrynet-daemon command line is running.

    import sys
    from jsonrpc.proxy import JSONRPCProxy

    try:
        from lbrynet.conf import API_CONNECTION_STRING
    except:
        print "You don't have lbrynet installed!"
        sys.exit(0)

    api = JSONRPCProxy.from_url(API_CONNECTION_STRING)
    status = api.status()
    if not status['is_running']:
        print status
    else:
        for cmd in api.commands():
            print "%s:\n%s" % (cmd, api.help({'command': cmd}))
@@ -27,7 +27,7 @@ By default, `lbrynet-daemon` will provide a JSON-RPC server at `http://localhost

Our [quickstart guide](http://lbry.io/quickstart) provides a simple walkthrough and examples for learning.

The full API is documented [here](https://lbry.io/api).
The full API is documented [here](https://lbryio.github.io/lbry/cli).

## What is LBRY?
BIN app.icns
Binary file not shown.
@@ -1,129 +0,0 @@
import datetime
import re

CHANGELOG_START_RE = re.compile(r'^\#\# \[Unreleased\]')
CHANGELOG_END_RE = re.compile(r'^\#\# \[.*\] - \d{4}-\d{2}-\d{2}')
# if we come across a section header between two release section headers
# then we probably have an improperly formatted changelog
CHANGELOG_ERROR_RE = re.compile(r'^\#\# ')
SECTION_RE = re.compile(r'^\#\#\# (.*)$')
EMPTY_RE = re.compile(r'^\w*\*\w*$')
ENTRY_RE = re.compile(r'\* (.*)')
VALID_SECTIONS = ['Added', 'Changed', 'Deprecated', 'Removed', 'Fixed', 'Security']

# allocate some entries to cut-down on merge conflicts
TEMPLATE = """### Added
*
*

### Changed
*
*

### Fixed
*
*

### Deprecated
*
*

### Removed
*
*

"""


class Changelog(object):
    def __init__(self, path):
        self.path = path
        self.start = []
        self.unreleased = []
        self.rest = []
        self._parse()

    def _parse(self):
        with open(self.path) as fp:
            lines = fp.readlines()

        unreleased_start_found = False
        unreleased_end_found = False

        for line in lines:
            if not unreleased_start_found:
                self.start.append(line)
                if CHANGELOG_START_RE.search(line):
                    unreleased_start_found = True
                continue
            if unreleased_end_found:
                self.rest.append(line)
                continue
            if CHANGELOG_END_RE.search(line):
                self.rest.append(line)
                unreleased_end_found = True
                continue
            if CHANGELOG_ERROR_RE.search(line):
                raise Exception(
                    'Failed to parse {}: {}'.format(self.path, 'unexpected section header found'))
            self.unreleased.append(line)

        self.unreleased = self._normalize_section(self.unreleased)

    @staticmethod
    def _normalize_section(lines):
        """Parse a changelog entry and output a normalized form"""
        sections = {}
        current_section_name = None
        current_section_contents = []
        for line in lines:
            line = line.strip()
            if not line or EMPTY_RE.match(line):
                continue
            match = SECTION_RE.match(line)
            if match:
                if current_section_contents:
                    sections[current_section_name] = current_section_contents
                    current_section_contents = []
                current_section_name = match.group(1)
                if current_section_name not in VALID_SECTIONS:
                    raise ValueError("Section '{}' is not valid".format(current_section_name))
                continue
            match = ENTRY_RE.match(line)
            if match:
                current_section_contents.append(match.group(1))
                continue
            raise Exception('Something is wrong with line: {}'.format(line))
        if current_section_contents:
            sections[current_section_name] = current_section_contents

        output = []
        for section in VALID_SECTIONS:
            if section not in sections:
                continue
            output.append('### {}'.format(section))
            for entry in sections[section]:
                output.append(' * {}'.format(entry))
            output.append("\n")
        return output

    def get_unreleased(self):
        return '\n'.join(self.unreleased) if self.unreleased else None

    def bump(self, version):
        if not self.unreleased:
            return

        today = datetime.datetime.today()
        header = "## [{}] - {}\n\n".format(version, today.strftime('%Y-%m-%d'))

        changelog_data = (
            ''.join(self.start) +
            TEMPLATE +
            header +
            '\n'.join(self.unreleased) + '\n\n'
            + ''.join(self.rest)
        )

        with open(self.path, 'w') as fp:
            fp.write(changelog_data)
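As an illustrative aside (not part of the diff), a minimal sketch of how this class is driven by build/release.py below; the path and version string are illustrative and assume the sketch is run from the build/ directory:

    import changelog

    log = changelog.Changelog('../CHANGELOG.md')  # release.py passes <repo root>/CHANGELOG.md
    notes = log.get_unreleased()                  # normalized '### Section' / ' * entry' text, or None
    if notes is not None:
        print(notes)
        log.bump('0.18.1')  # illustrative version; rewrites the file with a dated
                            # release header and a fresh Unreleased template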
204 build/release.py
@@ -1,204 +0,0 @@
"""Bump version and create Github release

This script should be run locally, not on a build server.
"""
import argparse
import contextlib
import os
import re
import subprocess
import sys

import git
import github

import changelog

ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))


def main():
    bumpversion_parts = get_bumpversion_parts()

    parser = argparse.ArgumentParser()
    parser.add_argument("part", choices=bumpversion_parts, help="part of version to bump")
    parser.add_argument("--skip-sanity-checks", action="store_true")
    parser.add_argument("--skip-push", action="store_true")
    parser.add_argument("--dry-run", action="store_true")
    parser.add_argument("--confirm", action="store_true")
    args = parser.parse_args()

    if args.dry_run:
        print "DRY RUN. Nothing will be committed/pushed."

    repo = Repo('lbry', args.part, ROOT)
    branch = 'master'

    print 'Current version: {}'.format(repo.current_version)
    print 'New version: {}'.format(repo.new_version)

    if not args.confirm and not confirm():
        print "Aborting"
        return 1

    if not args.skip_sanity_checks:
        run_sanity_checks(repo, branch)
    repo.assert_new_tag_is_absent()

    is_rc = re.search('\drc\d+$', repo.new_version) is not None
    # only have a release message for real releases, not for RCs
    release_msg = None if is_rc else repo.get_unreleased_changelog()
    if release_msg is None:
        release_msg = ''

    if args.dry_run:
        print "rc: " + ("yes" if is_rc else "no")
        print "release message: \n" + (release_msg if not is_rc else " NO MESSAGE FOR RCs")
        return

    gh_token = get_gh_token()
    auth = github.Github(gh_token)
    github_repo = auth.get_repo('lbryio/lbry')

    if not is_rc:
        repo.bump_changelog()
    repo.bumpversion()

    new_tag = repo.get_new_tag()
    github_repo.create_git_release(new_tag, new_tag, release_msg, draft=True, prerelease=is_rc)

    if args.skip_push:
        print (
            'Skipping push; you will have to reset and delete tags if '
            'you want to run this script again.'
        )
    else:
        repo.git_repo.git.push(follow_tags=True, recurse_submodules='check')


class Repo(object):
    def __init__(self, name, part, directory):
        self.name = name
        self.part = part
        if not self.part:
            raise Exception('Part required')
        self.directory = directory
        self.git_repo = git.Repo(self.directory)
        self._bumped = False

        self.current_version = self._get_current_version()
        self.new_version = self._get_new_version()
        self._changelog = changelog.Changelog(os.path.join(self.directory, 'CHANGELOG.md'))

    def get_new_tag(self):
        return 'v' + self.new_version

    def get_unreleased_changelog(self):
        return self._changelog.get_unreleased()

    def bump_changelog(self):
        self._changelog.bump(self.new_version)
        with pushd(self.directory):
            self.git_repo.git.add(os.path.basename(self._changelog.path))

    def _get_current_version(self):
        with pushd(self.directory):
            output = subprocess.check_output(
                ['bumpversion', '--dry-run', '--list', '--allow-dirty', self.part])
            return re.search('^current_version=(.*)$', output, re.M).group(1)

    def _get_new_version(self):
        with pushd(self.directory):
            output = subprocess.check_output(
                ['bumpversion', '--dry-run', '--list', '--allow-dirty', self.part])
            return re.search('^new_version=(.*)$', output, re.M).group(1)

    def bumpversion(self):
        if self._bumped:
            raise Exception('Cowardly refusing to bump a repo twice')
        with pushd(self.directory):
            subprocess.check_call(['bumpversion', '--allow-dirty', self.part])
            self._bumped = True

    def assert_new_tag_is_absent(self):
        new_tag = self.get_new_tag()
        tags = self.git_repo.git.tag()
        if new_tag in tags.split('\n'):
            raise Exception('Tag {} is already present in repo {}.'.format(new_tag, self.name))

    def is_behind(self, branch):
        self.git_repo.remotes.origin.fetch()
        rev_list = '{branch}...origin/{branch}'.format(branch=branch)
        commits_behind = self.git_repo.git.rev_list(rev_list, right_only=True, count=True)
        commits_behind = int(commits_behind)
        return commits_behind > 0


def get_bumpversion_parts():
    with pushd(ROOT):
        output = subprocess.check_output([
            'bumpversion', '--dry-run', '--list', '--allow-dirty', 'fake-part',
        ])
        parse_line = re.search('^parse=(.*)$', output, re.M).group(1)
        return tuple(re.findall('<([^>]+)>', parse_line))


def get_gh_token():
    if 'GH_TOKEN' in os.environ:
        return os.environ['GH_TOKEN']
    else:
        print """
Please enter your personal access token. If you don't have one
See https://github.com/lbryio/lbry-app/wiki/Release-Script#generate-a-personal-access-token
for instructions on how to generate one.

You can also set the GH_TOKEN environment variable to avoid seeing this message
in the future"""
        return raw_input('token: ').strip()


def confirm():
    try:
        return raw_input('Is this what you want? [y/N] ').strip().lower() == 'y'
    except KeyboardInterrupt:
        return False


def run_sanity_checks(repo, branch):
    if repo.git_repo.is_dirty():
        print 'Cowardly refusing to release a dirty repo'
        sys.exit(1)
    if repo.git_repo.active_branch.name != branch:
        print 'Cowardly refusing to release when not on the {} branch'.format(branch)
        sys.exit(1)
    if repo.is_behind(branch):
        print 'Cowardly refusing to release when behind origin'
        sys.exit(1)
    if not is_custom_bumpversion_version():
        print (
            'Install LBRY\'s fork of bumpversion: '
            'pip install -U git+https://github.com/lbryio/bumpversion.git'
        )
        sys.exit(1)


def is_custom_bumpversion_version():
    try:
        output = subprocess.check_output(['bumpversion', '-v'], stderr=subprocess.STDOUT).strip()
        if output == 'bumpversion 0.5.4-lbry':
            return True
    except (subprocess.CalledProcessError, OSError):
        pass
    return False


@contextlib.contextmanager
def pushd(new_dir):
    previous_dir = os.getcwd()
    os.chdir(new_dir)
    yield
    os.chdir(previous_dir)


if __name__ == '__main__':
    sys.exit(main())
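As an illustrative aside (not part of the diff): get_bumpversion_parts() above recovers the CLI choices by pulling every <named> group out of the `parse=` line printed by `bumpversion --dry-run --list`. Pasting in the pattern from the deleted bumpversion config directly, instead of shelling out, shows what it yields:

    import re

    parse_line = r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)((?P<release>[a-z]+)(?P<candidate>\d+))?'
    print(tuple(re.findall('<([^>]+)>', parse_line)))
    # ('major', 'minor', 'patch', 'release', 'candidate')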
332 ez_setup.py
@@ -1,332 +0,0 @@
#!/usr/bin/env python
"""Bootstrap setuptools installation

To use setuptools in your package's setup.py, include this
file in the same directory and add this to the top of your setup.py::

    from ez_setup import use_setuptools
    use_setuptools()

To require a specific version of setuptools, set a download
mirror, or use an alternate download directory, simply supply
the appropriate options to ``use_setuptools()``.

This file can also be run as a script to install or upgrade setuptools.
"""
import os
import shutil
import sys
import tempfile
import zipfile
import optparse
import subprocess
import platform
import textwrap
import contextlib

from distutils import log

try:
    from urllib.request import urlopen
except ImportError:
    from urllib2 import urlopen

try:
    from site import USER_SITE
except ImportError:
    USER_SITE = None

DEFAULT_VERSION = "4.0.1"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"


def _python_cmd(*args):
    """
    Return True if the command succeeded.
    """
    args = (sys.executable,) + args
    return subprocess.call(args) == 0


def _install(archive_filename, install_args=()):
    with archive_context(archive_filename):
        # installing
        log.warn('Installing Setuptools')
        if not _python_cmd('setup.py', 'install', *install_args):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
            # exitcode will be 2
            return 2


def _build_egg(egg, archive_filename, to_dir):
    with archive_context(archive_filename):
        # building an egg
        log.warn('Building a Setuptools egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')


class ContextualZipFile(zipfile.ZipFile):
    """
    Supplement ZipFile class to support context manager for Python 2.6
    """

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def __new__(cls, *args, **kwargs):
        """
        Construct a ZipFile or ContextualZipFile as appropriate
        """
        if hasattr(zipfile.ZipFile, '__exit__'):
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)


@contextlib.contextmanager
def archive_context(filename):
    # extracting the archive
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        with ContextualZipFile(filename) as archive:
            archive.extractall()

        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)
        yield

    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)


def _do_download(version, download_base, to_dir, download_delay):
    egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
                       % (version, sys.version_info[0], sys.version_info[1]))
    if not os.path.exists(egg):
        archive = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, archive, to_dir)
    sys.path.insert(0, egg)

    # Remove previously-imported pkg_resources if present (see
    # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
    if 'pkg_resources' in sys.modules:
        del sys.modules['pkg_resources']

    import setuptools
    setuptools.bootstrap_install_from = egg


def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
        to_dir=os.curdir, download_delay=15):
    to_dir = os.path.abspath(to_dir)
    rep_modules = 'pkg_resources', 'setuptools'
    imported = set(sys.modules).intersection(rep_modules)
    try:
        import pkg_resources
    except ImportError:
        return _do_download(version, download_base, to_dir, download_delay)
    try:
        pkg_resources.require("setuptools>=" + version)
        return
    except pkg_resources.DistributionNotFound:
        return _do_download(version, download_base, to_dir, download_delay)
    except pkg_resources.VersionConflict as VC_err:
        if imported:
            msg = textwrap.dedent("""
                The required version of setuptools (>={version}) is not available,
                and can't be installed while this script is running. Please
                install a more recent version first, using
                'easy_install -U setuptools'.

                (Currently using {VC_err.args[0]!r})
                """).format(VC_err=VC_err, version=version)
            sys.stderr.write(msg)
            sys.exit(2)

        # otherwise, reload ok
        del pkg_resources, sys.modules['pkg_resources']
        return _do_download(version, download_base, to_dir, download_delay)


def _clean_check(cmd, target):
    """
    Run the command to download target. If the command fails, clean up before
    re-raising the error.
    """
    try:
        subprocess.check_call(cmd)
    except subprocess.CalledProcessError:
        if os.access(target, os.F_OK):
            os.unlink(target)
        raise


def download_file_powershell(url, target):
    """
    Download the file at url to target using Powershell (which will validate
    trust). Raise an exception if the command cannot complete.
    """
    target = os.path.abspath(target)
    ps_cmd = (
        "[System.Net.WebRequest]::DefaultWebProxy.Credentials = "
        "[System.Net.CredentialCache]::DefaultCredentials; "
        "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)"
        % vars()
    )
    cmd = [
        'powershell',
        '-Command',
        ps_cmd,
    ]
    _clean_check(cmd, target)


def has_powershell():
    if platform.system() != 'Windows':
        return False
    cmd = ['powershell', '-Command', 'echo test']
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True

download_file_powershell.viable = has_powershell


def download_file_curl(url, target):
    cmd = ['curl', url, '--silent', '--output', target]
    _clean_check(cmd, target)


def has_curl():
    cmd = ['curl', '--version']
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True

download_file_curl.viable = has_curl


def download_file_wget(url, target):
    cmd = ['wget', url, '--quiet', '--output-document', target]
    _clean_check(cmd, target)


def has_wget():
    cmd = ['wget', '--version']
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True

download_file_wget.viable = has_wget


def download_file_insecure(url, target):
    """
    Use Python to download the file, even though it cannot authenticate the
    connection.
    """
    src = urlopen(url)
    try:
        # Read all the data in one block.
        data = src.read()
    finally:
        src.close()

    # Write all the data in one block to avoid creating a partial file.
    with open(target, "wb") as dst:
        dst.write(data)

download_file_insecure.viable = lambda: True


def get_best_downloader():
    downloaders = (
        download_file_powershell,
        download_file_curl,
        download_file_wget,
        download_file_insecure,
    )
    viable_downloaders = (dl for dl in downloaders if dl.viable())
    return next(viable_downloaders, None)


def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
        to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader):
    """
    Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an egg for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.
    `delay` is the number of seconds to pause before an actual download
    attempt.

    ``downloader_factory`` should be a function taking no arguments and
    returning a function for downloading a URL to a target.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    zip_name = "setuptools-%s.zip" % version
    url = download_base + zip_name
    saveto = os.path.join(to_dir, zip_name)
    if not os.path.exists(saveto):  # Avoid repeated downloads
        log.warn("Downloading %s", url)
        downloader = downloader_factory()
        downloader(url, saveto)
    return os.path.realpath(saveto)


def _build_install_args(options):
    """
    Build the arguments to 'python setup.py install' on the setuptools package
    """
    return ['--user'] if options.user_install else []


def _parse_args():
    """
    Parse the command line for options
    """
    parser = optparse.OptionParser()
    parser.add_option(
        '--user', dest='user_install', action='store_true', default=False,
        help='install in user site package (requires Python 2.6 or later)')
    parser.add_option(
        '--download-base', dest='download_base', metavar="URL",
        default=DEFAULT_URL,
        help='alternative URL from where to download the setuptools package')
    parser.add_option(
        '--insecure', dest='downloader_factory', action='store_const',
        const=lambda: download_file_insecure, default=get_best_downloader,
        help='Use internal, non-validating downloader'
    )
    parser.add_option(
        '--version', help="Specify which version to download",
        default=DEFAULT_VERSION,
    )
    options, args = parser.parse_args()
    # positional arguments are ignored
    return options


def main():
    """Install or upgrade setuptools and EasyInstall"""
    options = _parse_args()
    archive = download_setuptools(
        version=options.version,
        download_base=options.download_base,
        downloader_factory=options.downloader_factory,
    )
    return _install(archive, _build_install_args(options))

if __name__ == '__main__':
    sys.exit(main())
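As an aside (not part of the diff), the bootstrap usage described in the module docstring looks like this in a consuming setup.py; the package name and version are placeholders:

    # setup.py sitting next to a vendored copy of ez_setup.py
    from ez_setup import use_setuptools
    use_setuptools()

    from setuptools import setup

    setup(
        name='example-package',   # placeholder
        version='0.0.1',          # placeholder
    )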
@@ -1,185 +0,0 @@
import time
import cgi
import mimetools
import os
import tempfile
from twisted.web import server


class DaemonRequest(server.Request):
    """
    For LBRY specific request functionality. Currently just provides
    handling for large multipart POST requests, taken from here:
    http://sammitch.ca/2013/07/handling-large-requests-in-twisted/

    For multipart POST requests, this populates self.args with temp
    file objects instead of strings. Note that these files don't auto-delete
    on close because we want to be able to move and rename them.

    """

    # max amount of memory to allow any ~single~ request argument [ie: POSTed file]
    # note: this value seems to be taken with a grain of salt, memory usage may spike
    # FAR above this value in some cases.
    # eg: set the memory limit to 5 MB, write 2 blocks of 4MB, mem usage will
    # have spiked to 8MB before the data is rolled to disk after the
    # second write completes.
    memorylimit = 1024*1024*100

    # enable/disable debug logging
    do_log = False

    # re-defined only for debug/logging purposes
    def gotLength(self, length):
        if self.do_log:
            print '%f Headers received, Content-Length: %d' % (time.time(), length)
        server.Request.gotLength(self, length)

    # re-definition of twisted.web.server.Request.requestreceived, the only difference
    # is that self.parse_multipart() is used rather than cgi.parse_multipart()
    def requestReceived(self, command, path, version):
        from twisted.web.http import parse_qs
        if self.do_log:
            print '%f Request Received' % time.time()

        self.content.seek(0, 0)
        self.args = {}
        self.stack = []

        self.method, self.uri = command, path
        self.clientproto = version
        x = self.uri.split(b'?', 1)

        if len(x) == 1:
            self.path = self.uri
        else:
            self.path, argstring = x
            self.args = parse_qs(argstring, 1)

        # cache the client and server information, we'll need this later to be
        # serialized and sent with the request so CGIs will work remotely
        self.client = self.channel.transport.getPeer()
        self.host = self.channel.transport.getHost()

        # Argument processing
        args = self.args
        ctype = self.requestHeaders.getRawHeaders(b'content-type')
        if ctype is not None:
            ctype = ctype[0]

        if self.method == b"POST" and ctype:
            mfd = b'multipart/form-data'
            key, pdict = cgi.parse_header(ctype)
            if key == b'application/x-www-form-urlencoded':
                args.update(parse_qs(self.content.read(), 1))
            elif key == mfd:
                try:
                    self.content.seek(0, 0)
                    args.update(self.parse_multipart(self.content, pdict))

                except KeyError as e:
                    if e.args[0] == b'content-disposition':
                        # Parse_multipart can't cope with missing
                        # content-dispostion headers in multipart/form-data
                        # parts, so we catch the exception and tell the client
                        # it was a bad request.
                        self.channel.transport.write(
                            b"HTTP/1.1 400 Bad Request\r\n\r\n")
                        self.channel.transport.loseConnection()
                        return
                    raise

        self.content.seek(0, 0)

        self.process()

    # re-definition of cgi.parse_multipart that uses a single temporary file to store
    # data rather than storing 2 to 3 copies in various lists.
    def parse_multipart(self, fp, pdict):
        if self.do_log:
            print '%f Parsing Multipart data: ' % time.time()
        rewind = fp.tell()  # save cursor
        fp.seek(0, 0)  # reset cursor

        boundary = ""
        if 'boundary' in pdict:
            boundary = pdict['boundary']
        if not cgi.valid_boundary(boundary):
            raise ValueError('Invalid boundary in multipart form: %r' % (boundary,))

        nextpart = "--" + boundary
        lastpart = "--" + boundary + "--"
        partdict = {}
        terminator = ""

        while terminator != lastpart:
            c_bytes = -1

            data = tempfile.NamedTemporaryFile(delete=False)
            if terminator:
                # At start of next part. Read headers first.
                headers = mimetools.Message(fp)
                clength = headers.getheader('content-length')
                if clength:
                    try:
                        c_bytes = int(clength)
                    except ValueError:
                        pass
                if c_bytes > 0:
                    data.write(fp.read(c_bytes))
            # Read lines until end of part.
            while 1:
                line = fp.readline()
                if not line:
                    terminator = lastpart  # End outer loop
                    break
                if line[:2] == "--":
                    terminator = line.strip()
                    if terminator in (nextpart, lastpart):
                        break
                data.write(line)
            # Done with part.
            if data.tell() == 0:
                continue
            if c_bytes < 0:
                # if a Content-Length header was not supplied with the MIME part
                # then the trailing line break must be removed.
                # we have data, read the last 2 bytes
                rewind = min(2, data.tell())
                data.seek(-rewind, os.SEEK_END)
                line = data.read(2)
                if line[-2:] == "\r\n":
                    data.seek(-2, os.SEEK_END)
                    data.truncate()
                elif line[-1:] == "\n":
                    data.seek(-1, os.SEEK_END)
                    data.truncate()

            line = headers['content-disposition']
            if not line:
                continue
            key, params = cgi.parse_header(line)
            if key != 'form-data':
                continue
            if 'name' in params:
                name = params['name']
                # kludge in the filename
                if 'filename' in params:
                    fname_index = name + '_filename'
                    if fname_index in partdict:
                        partdict[fname_index].append(params['filename'])
                    else:
                        partdict[fname_index] = [params['filename']]
            else:
                # Unnamed parts are not returned at all.
                continue
            data.seek(0, 0)
            if name in partdict:
                partdict[name].append(data)
            else:
                partdict[name] = [data]

        fp.seek(rewind)  # Restore cursor
        return partdict
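A speculative sketch (not part of the diff) of how this request class was wired into a twisted site before this commit, mirroring the assignment the DaemonServer hunks below remove; the root resource here is a stand-in:

    from twisted.web import server
    from twisted.web.resource import Resource
    from lbrynet.daemon.DaemonRequest import DaemonRequest  # module removed by this commit

    root = Resource()                    # stand-in for the daemon's real root resource
    site = server.Site(root)
    site.requestFactory = DaemonRequest  # the same assignment DaemonServer drops below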
@@ -9,7 +9,6 @@ from lbrynet import conf
from lbrynet.daemon.Daemon import Daemon
from lbrynet.daemon.auth.auth import PasswordChecker, HttpPasswordRealm
from lbrynet.daemon.auth.util import initialize_api_key_file
from lbrynet.daemon.DaemonRequest import DaemonRequest

log = logging.getLogger(__name__)


@@ -36,7 +35,6 @@ class DaemonServer(object):
        self.root.putChild(conf.settings['API_ADDRESS'], self._daemon)

        lbrynet_server = get_site_base(use_auth, self.root)
        lbrynet_server.requestFactory = DaemonRequest

        try:
            self.server_port = reactor.listenTCP(
@@ -1,61 +0,0 @@
import json
import logging.handlers
import os

from twisted.internet.task import LoopingCall
from twisted.internet import reactor
from lbrynet import conf


conf.initialize_settings()
log_dir = conf.settings['data_dir']
LOG_FILENAME = os.path.join(log_dir, 'lbrynet-daemon.log')

if os.path.isfile(LOG_FILENAME):
    f = open(LOG_FILENAME, 'r')
    PREVIOUS_LOG = len(f.read())
    f.close()
else:
    PREVIOUS_LOG = 0

log = logging.getLogger(__name__)
handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, maxBytes=2097152, backupCount=5)
log.addHandler(handler)
log.setLevel(logging.INFO)


class Autofetcher(object):
    """
    Download name claims as they occur
    """

    def __init__(self, api):
        self._api = api
        self._checker = LoopingCall(self._check_for_new_claims)
        self.best_block = None

    def start(self):
        reactor.addSystemEventTrigger('before', 'shutdown', self.stop)
        self._checker.start(5)

    def stop(self):
        log.info("Stopping autofetcher")
        self._checker.stop()

    def _check_for_new_claims(self):
        block = self._api.get_best_blockhash()
        if block != self.best_block:
            log.info("Checking new block for name claims, block hash: %s" % block)
            self.best_block = block
            transactions = self._api.get_block({'blockhash': block})['tx']
            for t in transactions:
                c = self._api.get_claims_for_tx({'txid': t})
                if len(c):
                    for i in c:
                        log.info("Downloading stream for claim txid: %s" % t)
                        self._api.get({'name': t, 'stream_info': json.loads(i['value'])})


def run(api):
    fetcher = Autofetcher(api)
    fetcher.start()
@@ -1,37 +0,0 @@
from twisted.internet import defer


class migrator(object):
    """
    Re-resolve lbry names to write missing data to blockchain.db and to cache the nametrie
    """

    def __init__(self, api):
        self._api = api

    def start(self):
        def _resolve_claims(claimtrie):
            claims = [i for i in claimtrie if 'txid' in i.keys()]
            r = defer.DeferredList(
                [self._api._resolve_name(claim['name'], force_refresh=True) for claim in claims],
                consumeErrors=True)
            return r

        def _restart_lbry_files():
            def _restart_lbry_file(lbry_file):
                return lbry_file.restore()

            lbry_files = self._api.lbry_file_manager.lbry_files
            r = defer.DeferredList(
                [_restart_lbry_file(lbry_file) for lbry_file in lbry_files if not lbry_file.txid],
                consumeErrors=True)
            return r

        d = self._api.session.wallet.get_nametrie()
        d.addCallback(_resolve_claims)
        d.addCallback(lambda _: _restart_lbry_files())


def run(api):
    refresher = migrator(api)
    refresher.start()
3 setup.py
@@ -65,8 +65,5 @@ setup(
    packages=find_packages(base_dir),
    install_requires=requires,
    entry_points={'console_scripts': console_scripts},
    package_data={
        package_name: list(package_files('lbrynet/resources/ui'))
    },
    zip_safe=False,
)