From 00edd3cb66ebb3cf6805dd070554524a9055aa85 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Wed, 29 Nov 2017 12:01:39 -0500 Subject: [PATCH 1/3] remove unused files --- .bumpversion.cfg | 20 -- .gitmodules | 0 CHANGELOG.md | 3 +- app.icns | Bin 8362 -> 0 bytes ez_setup.py | 332 ------------------ lbrynet/daemon/DaemonRequest.py | 185 ---------- lbrynet/daemon/DaemonServer.py | 2 - lbrynet/daemon/daemon_scripts/Autofetcher.py | 61 ---- lbrynet/daemon/daemon_scripts/__init__.py | 0 lbrynet/daemon/daemon_scripts/migrateto025.py | 37 -- setup.py | 3 - uri_handler/__init__.py | 0 12 files changed, 2 insertions(+), 641 deletions(-) delete mode 100644 .bumpversion.cfg delete mode 100644 .gitmodules delete mode 100644 app.icns delete mode 100644 ez_setup.py delete mode 100644 lbrynet/daemon/DaemonRequest.py delete mode 100644 lbrynet/daemon/daemon_scripts/Autofetcher.py delete mode 100644 lbrynet/daemon/daemon_scripts/__init__.py delete mode 100644 lbrynet/daemon/daemon_scripts/migrateto025.py delete mode 100644 uri_handler/__init__.py diff --git a/.bumpversion.cfg b/.bumpversion.cfg deleted file mode 100644 index 8d0e23700..000000000 --- a/.bumpversion.cfg +++ /dev/null @@ -1,20 +0,0 @@ -[bumpversion] -current_version = 0.15.0 -commit = True -tag = True -parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)((?P<release>[a-z]+)(?P<candidate>\d+))? -serialize = - {major}.{minor}.{patch}{release}{candidate} - {major}.{minor}.{patch} - [bumpversion:part:candidate] -first_value = 1 - [bumpversion:part:release] -optional_value = production -values = - rc - production - [bumpversion:file:lbrynet/__init__.py] - diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index e69de29bb..000000000 diff --git a/CHANGELOG.md b/CHANGELOG.md index 6f9812547..1f5f2b610 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -32,7 +32,8 @@ at anytime. 
### Removed * Removed claim related filter arguments `name`, `claim_id`, and `outpoint` from `file_list`, `file_delete`, `file_set_status`, and `file_reflect` - * + * Removed unused files + * Removed old and unused UI related code ## [0.18.0] - 2017-11-08 diff --git a/app.icns b/app.icns deleted file mode 100644 index b4d00d2f26dc4f4221a0d3e93e988a15b0ada3b8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 8362 zcmeHMX^^;-7s*c>T~rX2F#)n#eiv;zM+_U$%yH*`5yhGqf6(m#TYj-CUju3-fJkCv~Q4E zGH6N3P?DHRP(~~8n=Tp=HpFw=%nji{Jef2NJz)ZtVj8|#Bp<`RSs)-4B`S%kCMiU& z%_iC;G%IJ@n}lN7-9ky_3h1zGv{hS5pSw#UW?hjJ!opZpUJ6TCmOyXR5yBPL1`)R{ zjEc3LzMx!5Pzye*(YAaW-^=l7DT;_Kgw6No`9iZX)wc^mu|>fuh$sn+T0Es7iiG~0 z@U0N81YAk3FC$)FBEB-xvC5HdK`OT8yS-BJ+@g#+Qo7FC0t5?=FJrDNT{rLflAMy% zT)E!vku%#YDQyRHU6#h5T-yP)kd;u6nNqbb=;S%*b)>V-DN2BqwU~ezPAfhwB^l>7 zKCKMNlhX5@Zp?-(6&=`eoq{Y9ZlR_?H?O$&r?W^Xmrw!M@;{&7pI?g-w(e4Lzg_emcAC;`IR4i zRXP3M?_cuBR(0vVTW8LoskiRFUOByQxwh@G`2_O{O}%B$4YdB*3jjw|7!ytV+H-v~ zjyk|S)haQD@k@ztUwB<}9X~4ZSAI^usTs2y^sQ85WEu?JNE%BrsXkJZ&2xRTDrqcs z6Gl3n+)Pn(*-|~x=o?kKv2b3}T$V{~q?nmZ8i_?oJZE?cVl6UlCK7rtgbhFr)WS3?jO*pr&Bd zE(m8XYSfoxlD$-C@FTjUq8N{|+PFyHfX$;mm4R9#0nKTnb#drU55-}V{niWEV&FDV zEJ*Sbk0(U0O(fW-1aB$4LzqRq8sO|DMryInn@l5cpUhAbAq0JM3&)B9LPPv4JsG;; z8*|(<028pTCv=}QjOe~O6XUGV zOd(?7ip-mvC{aY$8+DjdvHZRnOQ(|1f*8gTg~UR{1j}n0jg5(g4dy0N%~ZPEh?xr( z8c8#`g;Z!nFy}S;`aMXIhPg&+Dg1qbke`+&*5qyEr?sq0*j83_xK1Bci+n5gy_{H4 zTeLS*!K!yUc+(TPc3Z4y%^@|dTu#_7Z*lmMh&hEmVm0U&>m!9x)Z& zSu-DQaxFrV}7-%e6~FT|n4Wd#KL!JjZTLVxXN3$D{`@ za#k-*%K@fn$ub7)@PZ+4fF8}twW1ko5hB}~vx=gJqP$;QSzk)5M1#=KW z6&ie@S^|#Dgf3_Ud?0ahq8VU2fD`TjZVe&~r=hM{`c|~Xw%Xb;Rdd3C@uA^Dsum)i z%MpMm6ic3ZmShlOMfM_`Nx+7GB`RJQi-ZU zV;BO=It!;PW&#hgWCKmbiJEQBu|8HSaQ1DYdfBlCfe9n%I54pV5wF6%;N`ZEo=(1l z*U%SXdODh1+vy@b9pge5r?JzScl&VH-hngAk?jjbK_Wk0CEr=VVU=xl-E@=KNHrLB zU5`&M9CxbYbrfYU&61e4suM?`i_;lx>0WS=lwGSmY@JvjnfD+a^IYL|bai%1#l`J3 z!nx;S4OrRgJid!K^i;<*N^+Ldxf)7jXNQM_6(1bL*RA6-wjiTA%XW!;8O0%POTa{Wf*^Pv)&j0T6u1~(HdmB&?s7|dU7>f)>(pWnCv9@-Qgns!j+^K?GdrW_Q_NU)Ez?SiqIdEa*FN!79CTI|{Pi zxdUaSE01oRMI*DcI@=6HB>MyYKyhz@9CP+vwc;n&W##&&BI1n(4Xl$Yw zkTQA*ujvH%EmQ@sSnaua-@S*bk(bixaNz6aRaayfg8gc+^AshxzlK3D(1{E7XMHnx-XN8NFH_f# zKu;$H9i#Cp4jlaI8S__)d~g-%Zw7l>!Cc36Ao#{lgYV9#rEd-d-wD=}R&(9&0;agE z1Is&YL-5u)P~G-!Fwh)43Gh^Z<(t9NZJ?u|y&bOycg`T1abNJ_Yu}+w?;gIl0SK$F zel6HzDRjoSf)_KGvHtDhZmk`4&KVnr$~!=%srJ&>gD1M*E?-+swBUQeK1Lt%+gV!< zj@~^7ckwG<3m!R>sOHv@w=d_2Du^iu@~qzSK&WGSGn{DpRq)WPNg-j_r`&ny@V)b( zk51hl{Ko!RL|Mlfz2*LGXVUrajs8>Jq{-t!RUk2|Z{WbMgT3>Pu&tD=I{*Zm9CqJwEF|73R2 z)GWxkZZuE^T-4B$XWtKDuAPgdEJQ;wS8gZDaAO^|@&;zBhB5CxQcUQi#70sQr~wo` zb2lkzFgG$clV*k#c{3?l(0#>h=p!gZL*Il3v?sTclHo#o5lC6cQqAl(m;p+FSXrVd z12hslNm&PaSm3IHY4t9uVscBg&M=m2pKzJycC93(G5`4c@Wc$l|@oPlYNvc{LZ19D2%1i zTL}}UtKi^8Bc3643p5+%`29qYD9v#!QgxR5O#@6GSP*V@w3~S~tHs{xn`-EAxU9fP zXM<31G%>52@X7+k1}`9&XrXsz21gG@fiMY0hypBO64rn{zKdcA2nG`XgQ9}pX}|*L zyNXqZ(@D68Fhc{<1ZcP~0gt#;OzsH73v@^@F3MnI9j6>#o?y{*lz*f@7#c+_hS7(Q z09g45X($qtoY#-z!`U4HLAHGiSm0;AAfz|R0?2n!P?+1yLPaxW!P?Nw-cO1ZzSoE| z8w9gMZlfk(AtX07bHyR-{Cz}_B6}kYU<@x$?j~~Z2F4c1ZIl8wY_)8rShk|SkK*eb z@Ioj=euHR}#rZ9P0(41fhPYxQzvCI^-@-zEbA3SePKvIB3d_c-fWbrmE{a<~h6HDU zJn}aS$KqK6M+Gt6MTA#Baf~x&q<55gF<#wJ6to#@y%V8l6ShQ#3jxA~VPrUyT9GSJ z&FZidHx>#jyetu#PvBCbo)tmd3H4cUm;ty*DFjo4yhL_0dcKLRJ^gJ9Uii%JneSsr)Y+i#3BCp9Qf(at4nbU{d&bbWth91y>Hz3lkemQM#PiT_s7t8)fq7Ag$FblWjs;gKTn z!sKdjXc%U{wo)7h&;$oZym$`SFyh8xKtku?GAFbLv^E=vdBBOW%|s~G@Gy|l1-*&` zOC@|5*hb6)5XVJIYfR`F1b)13SS`t3QlWssDK>sk=erp|WcQJ(95#F*AD|;#pXw~c z9oa#6TS*GPp(NxqA<{J{Y|wxsF0_RU>Hs6>w1x@^n%H7*2s4&Nhm=TW6|W^Y5}eHF 
z!XGCyqoh@LkqQUV0#=RRoH7m=^srL{11f8mvpKAnkR2*!qtD}XX0+}OP78K`D^R?8 zflZ`zm?|2$G8X+oAmR*#)Wa13Kp!SF*1D3=SBGUuH5TG8*Z+S6rhW6b>($S1=KK#w z510RNq4xP^%-&xfdGCQkql52U9{ubYk(&;Vm3P$BNxcV02j9E#vl3wOD}#ftE)CVr z>^V3*^wI683Ql-U-99kZ|93}m*Z=*2k-?8jQ%)qm`n{3CXHZ8>Ty;17d3gAv?x_r+>UZ^z)(mk9XrfcGdfsvF+4jhEgvN4*ho7Fl{d)h{z&*90%;*0@JTZkyf8TYHuhLt^)EsGRb^bBX!-vB!vp)^{KOUS?jI{}uH?Wu?+uRy!3{Jv z@ROC}63g}v95W;L*3jTDQOe*t^Xc-)hqt};t0+AIndgtJ*OF&&_YB_L;5zxy^5}u9 z6mCXtJv27(&~&=3gqy@;kojdh_v133=s#Tk*)mctfuBD-CG1=LN_p`0CN#(Y)6IAi zGS4j!$A}j1!QHdJC=dV14tu8EkJ#OQ?8ag|1snU1JT0j`1BVay-%;5~&j0gBdEZ&% zx+Z?ia7JYxLB+BA=L^HJxzCkH-d{T*HmQ|(tQ;br)D`_(r20cAOm#h+al{6=-Wx-M zuO0z8NzFu4e~hD3nK6AQ^5&+aTi1lpF)f}k@Dt2f|KZ5MUZn14Udi3CvG>LD@VoiX el&uJ)evEhyjOWegnv?qRzoJO{%k@8qz<&eeHH)zT diff --git a/ez_setup.py b/ez_setup.py deleted file mode 100644 index 1bcd3e94c..000000000 --- a/ez_setup.py +++ /dev/null @@ -1,332 +0,0 @@ -#!/usr/bin/env python -"""Bootstrap setuptools installation - -To use setuptools in your package's setup.py, include this -file in the same directory and add this to the top of your setup.py:: - - from ez_setup import use_setuptools - use_setuptools() - -To require a specific version of setuptools, set a download -mirror, or use an alternate download directory, simply supply -the appropriate options to ``use_setuptools()``. - -This file can also be run as a script to install or upgrade setuptools. -""" -import os -import shutil -import sys -import tempfile -import zipfile -import optparse -import subprocess -import platform -import textwrap -import contextlib - -from distutils import log - -try: - from urllib.request import urlopen -except ImportError: - from urllib2 import urlopen - -try: - from site import USER_SITE -except ImportError: - USER_SITE = None - -DEFAULT_VERSION = "4.0.1" -DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" - -def _python_cmd(*args): - """ - Return True if the command succeeded. 
- """ - args = (sys.executable,) + args - return subprocess.call(args) == 0 - - -def _install(archive_filename, install_args=()): - with archive_context(archive_filename): - # installing - log.warn('Installing Setuptools') - if not _python_cmd('setup.py', 'install', *install_args): - log.warn('Something went wrong during the installation.') - log.warn('See the error message above.') - # exitcode will be 2 - return 2 - - -def _build_egg(egg, archive_filename, to_dir): - with archive_context(archive_filename): - # building an egg - log.warn('Building a Setuptools egg in %s', to_dir) - _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) - # returning the result - log.warn(egg) - if not os.path.exists(egg): - raise IOError('Could not build the egg.') - - -class ContextualZipFile(zipfile.ZipFile): - """ - Supplement ZipFile class to support context manager for Python 2.6 - """ - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - self.close() - - def __new__(cls, *args, **kwargs): - """ - Construct a ZipFile or ContextualZipFile as appropriate - """ - if hasattr(zipfile.ZipFile, '__exit__'): - return zipfile.ZipFile(*args, **kwargs) - return super(ContextualZipFile, cls).__new__(cls) - - -@contextlib.contextmanager -def archive_context(filename): - # extracting the archive - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - with ContextualZipFile(filename) as archive: - archive.extractall() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - yield - - finally: - os.chdir(old_wd) - shutil.rmtree(tmpdir) - - -def _do_download(version, download_base, to_dir, download_delay): - egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' - % (version, sys.version_info[0], sys.version_info[1])) - if not os.path.exists(egg): - archive = download_setuptools(version, download_base, - to_dir, download_delay) - _build_egg(egg, archive, to_dir) - sys.path.insert(0, egg) - - # Remove previously-imported pkg_resources if present (see - # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). - if 'pkg_resources' in sys.modules: - del sys.modules['pkg_resources'] - - import setuptools - setuptools.bootstrap_install_from = egg - - -def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, download_delay=15): - to_dir = os.path.abspath(to_dir) - rep_modules = 'pkg_resources', 'setuptools' - imported = set(sys.modules).intersection(rep_modules) - try: - import pkg_resources - except ImportError: - return _do_download(version, download_base, to_dir, download_delay) - try: - pkg_resources.require("setuptools>=" + version) - return - except pkg_resources.DistributionNotFound: - return _do_download(version, download_base, to_dir, download_delay) - except pkg_resources.VersionConflict as VC_err: - if imported: - msg = textwrap.dedent(""" - The required version of setuptools (>={version}) is not available, - and can't be installed while this script is running. Please - install a more recent version first, using - 'easy_install -U setuptools'. 
- - (Currently using {VC_err.args[0]!r}) - """).format(VC_err=VC_err, version=version) - sys.stderr.write(msg) - sys.exit(2) - - # otherwise, reload ok - del pkg_resources, sys.modules['pkg_resources'] - return _do_download(version, download_base, to_dir, download_delay) - -def _clean_check(cmd, target): - """ - Run the command to download target. If the command fails, clean up before - re-raising the error. - """ - try: - subprocess.check_call(cmd) - except subprocess.CalledProcessError: - if os.access(target, os.F_OK): - os.unlink(target) - raise - -def download_file_powershell(url, target): - """ - Download the file at url to target using Powershell (which will validate - trust). Raise an exception if the command cannot complete. - """ - target = os.path.abspath(target) - ps_cmd = ( - "[System.Net.WebRequest]::DefaultWebProxy.Credentials = " - "[System.Net.CredentialCache]::DefaultCredentials; " - "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" - % vars() - ) - cmd = [ - 'powershell', - '-Command', - ps_cmd, - ] - _clean_check(cmd, target) - -def has_powershell(): - if platform.system() != 'Windows': - return False - cmd = ['powershell', '-Command', 'echo test'] - with open(os.path.devnull, 'wb') as devnull: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except Exception: - return False - return True - -download_file_powershell.viable = has_powershell - -def download_file_curl(url, target): - cmd = ['curl', url, '--silent', '--output', target] - _clean_check(cmd, target) - -def has_curl(): - cmd = ['curl', '--version'] - with open(os.path.devnull, 'wb') as devnull: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except Exception: - return False - return True - -download_file_curl.viable = has_curl - -def download_file_wget(url, target): - cmd = ['wget', url, '--quiet', '--output-document', target] - _clean_check(cmd, target) - -def has_wget(): - cmd = ['wget', '--version'] - with open(os.path.devnull, 'wb') as devnull: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except Exception: - return False - return True - -download_file_wget.viable = has_wget - -def download_file_insecure(url, target): - """ - Use Python to download the file, even though it cannot authenticate the - connection. - """ - src = urlopen(url) - try: - # Read all the data in one block. - data = src.read() - finally: - src.close() - - # Write all the data in one block to avoid creating a partial file. - with open(target, "wb") as dst: - dst.write(data) - -download_file_insecure.viable = lambda: True - -def get_best_downloader(): - downloaders = ( - download_file_powershell, - download_file_curl, - download_file_wget, - download_file_insecure, - ) - viable_downloaders = (dl for dl in downloaders if dl.viable()) - return next(viable_downloaders, None) - -def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader): - """ - Download setuptools from a specified location and return its filename - - `version` should be a valid setuptools version number that is available - as an egg for download under the `download_base` URL (which should end - with a '/'). `to_dir` is the directory where the egg will be downloaded. - `delay` is the number of seconds to pause before an actual download - attempt. - - ``downloader_factory`` should be a function taking no arguments and - returning a function for downloading a URL to a target. 
- """ - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - zip_name = "setuptools-%s.zip" % version - url = download_base + zip_name - saveto = os.path.join(to_dir, zip_name) - if not os.path.exists(saveto): # Avoid repeated downloads - log.warn("Downloading %s", url) - downloader = downloader_factory() - downloader(url, saveto) - return os.path.realpath(saveto) - -def _build_install_args(options): - """ - Build the arguments to 'python setup.py install' on the setuptools package - """ - return ['--user'] if options.user_install else [] - -def _parse_args(): - """ - Parse the command line for options - """ - parser = optparse.OptionParser() - parser.add_option( - '--user', dest='user_install', action='store_true', default=False, - help='install in user site package (requires Python 2.6 or later)') - parser.add_option( - '--download-base', dest='download_base', metavar="URL", - default=DEFAULT_URL, - help='alternative URL from where to download the setuptools package') - parser.add_option( - '--insecure', dest='downloader_factory', action='store_const', - const=lambda: download_file_insecure, default=get_best_downloader, - help='Use internal, non-validating downloader' - ) - parser.add_option( - '--version', help="Specify which version to download", - default=DEFAULT_VERSION, - ) - options, args = parser.parse_args() - # positional arguments are ignored - return options - -def main(): - """Install or upgrade setuptools and EasyInstall""" - options = _parse_args() - archive = download_setuptools( - version=options.version, - download_base=options.download_base, - downloader_factory=options.downloader_factory, - ) - return _install(archive, _build_install_args(options)) - -if __name__ == '__main__': - sys.exit(main()) diff --git a/lbrynet/daemon/DaemonRequest.py b/lbrynet/daemon/DaemonRequest.py deleted file mode 100644 index 6f46e6bed..000000000 --- a/lbrynet/daemon/DaemonRequest.py +++ /dev/null @@ -1,185 +0,0 @@ -import time -import cgi -import mimetools -import os -import tempfile -from twisted.web import server - - -class DaemonRequest(server.Request): - """ - For LBRY specific request functionality. Currently just provides - handling for large multipart POST requests, taken from here: - http://sammitch.ca/2013/07/handling-large-requests-in-twisted/ - - For multipart POST requests, this populates self.args with temp - file objects instead of strings. Note that these files don't auto-delete - on close because we want to be able to move and rename them. - - """ - - # max amount of memory to allow any ~single~ request argument [ie: POSTed file] - # note: this value seems to be taken with a grain of salt, memory usage may spike - # FAR above this value in some cases. - # eg: set the memory limit to 5 MB, write 2 blocks of 4MB, mem usage will - # have spiked to 8MB before the data is rolled to disk after the - # second write completes. 
- memorylimit = 1024*1024*100 - - # enable/disable debug logging - do_log = False - - # re-defined only for debug/logging purposes - def gotLength(self, length): - if self.do_log: - print '%f Headers received, Content-Length: %d' % (time.time(), length) - server.Request.gotLength(self, length) - - # re-definition of twisted.web.server.Request.requestreceived, the only difference - # is that self.parse_multipart() is used rather than cgi.parse_multipart() - def requestReceived(self, command, path, version): - from twisted.web.http import parse_qs - if self.do_log: - print '%f Request Received' % time.time() - - self.content.seek(0, 0) - self.args = {} - self.stack = [] - - self.method, self.uri = command, path - self.clientproto = version - x = self.uri.split(b'?', 1) - - if len(x) == 1: - self.path = self.uri - else: - self.path, argstring = x - self.args = parse_qs(argstring, 1) - - # cache the client and server information, we'll need this later to be - # serialized and sent with the request so CGIs will work remotely - self.client = self.channel.transport.getPeer() - self.host = self.channel.transport.getHost() - - # Argument processing - args = self.args - ctype = self.requestHeaders.getRawHeaders(b'content-type') - if ctype is not None: - ctype = ctype[0] - - if self.method == b"POST" and ctype: - mfd = b'multipart/form-data' - key, pdict = cgi.parse_header(ctype) - if key == b'application/x-www-form-urlencoded': - args.update(parse_qs(self.content.read(), 1)) - elif key == mfd: - try: - self.content.seek(0, 0) - args.update(self.parse_multipart(self.content, pdict)) - - except KeyError as e: - if e.args[0] == b'content-disposition': - # Parse_multipart can't cope with missing - # content-dispostion headers in multipart/form-data - # parts, so we catch the exception and tell the client - # it was a bad request. - self.channel.transport.write( - b"HTTP/1.1 400 Bad Request\r\n\r\n") - self.channel.transport.loseConnection() - return - raise - - self.content.seek(0, 0) - - self.process() - - # re-definition of cgi.parse_multipart that uses a single temporary file to store - # data rather than storing 2 to 3 copies in various lists. - def parse_multipart(self, fp, pdict): - if self.do_log: - print '%f Parsing Multipart data: ' % time.time() - rewind = fp.tell() #save cursor - fp.seek(0, 0) #reset cursor - - boundary = "" - if 'boundary' in pdict: - boundary = pdict['boundary'] - if not cgi.valid_boundary(boundary): - raise ValueError('Invalid boundary in multipart form: %r' % (boundary,)) - - nextpart = "--" + boundary - lastpart = "--" + boundary + "--" - partdict = {} - terminator = "" - - while terminator != lastpart: - c_bytes = -1 - - data = tempfile.NamedTemporaryFile(delete=False) - if terminator: - # At start of next part. Read headers first. - headers = mimetools.Message(fp) - clength = headers.getheader('content-length') - if clength: - try: - c_bytes = int(clength) - except ValueError: - pass - if c_bytes > 0: - data.write(fp.read(c_bytes)) - # Read lines until end of part. - while 1: - line = fp.readline() - if not line: - terminator = lastpart # End outer loop - break - if line[:2] == "--": - terminator = line.strip() - if terminator in (nextpart, lastpart): - break - data.write(line) - # Done with part. - if data.tell() == 0: - continue - if c_bytes < 0: - # if a Content-Length header was not supplied with the MIME part - # then the trailing line break must be removed. 
- # we have data, read the last 2 bytes - rewind = min(2, data.tell()) - data.seek(-rewind, os.SEEK_END) - line = data.read(2) - if line[-2:] == "\r\n": - data.seek(-2, os.SEEK_END) - data.truncate() - elif line[-1:] == "\n": - data.seek(-1, os.SEEK_END) - data.truncate() - - line = headers['content-disposition'] - if not line: - continue - key, params = cgi.parse_header(line) - if key != 'form-data': - continue - if 'name' in params: - name = params['name'] - # kludge in the filename - if 'filename' in params: - fname_index = name + '_filename' - if fname_index in partdict: - partdict[fname_index].append(params['filename']) - else: - partdict[fname_index] = [params['filename']] - else: - # Unnamed parts are not returned at all. - continue - data.seek(0, 0) - if name in partdict: - partdict[name].append(data) - else: - partdict[name] = [data] - - fp.seek(rewind) # Restore cursor - return partdict - - diff --git a/lbrynet/daemon/DaemonServer.py b/lbrynet/daemon/DaemonServer.py index d506c1245..588f5a936 100644 --- a/lbrynet/daemon/DaemonServer.py +++ b/lbrynet/daemon/DaemonServer.py @@ -9,7 +9,6 @@ from lbrynet import conf from lbrynet.daemon.Daemon import Daemon from lbrynet.daemon.auth.auth import PasswordChecker, HttpPasswordRealm from lbrynet.daemon.auth.util import initialize_api_key_file -from lbrynet.daemon.DaemonRequest import DaemonRequest log = logging.getLogger(__name__) @@ -36,7 +35,6 @@ class DaemonServer(object): self.root.putChild(conf.settings['API_ADDRESS'], self._daemon) lbrynet_server = get_site_base(use_auth, self.root) - lbrynet_server.requestFactory = DaemonRequest try: self.server_port = reactor.listenTCP( diff --git a/lbrynet/daemon/daemon_scripts/Autofetcher.py b/lbrynet/daemon/daemon_scripts/Autofetcher.py deleted file mode 100644 index 1cba26d3c..000000000 --- a/lbrynet/daemon/daemon_scripts/Autofetcher.py +++ /dev/null @@ -1,61 +0,0 @@ -import json -import logging.handlers -import os - -from twisted.internet.task import LoopingCall -from twisted.internet import reactor -from lbrynet import conf - - -conf.initialize_settings() -log_dir = conf.settings['data_dir'] -LOG_FILENAME = os.path.join(log_dir, 'lbrynet-daemon.log') - -if os.path.isfile(LOG_FILENAME): - f = open(LOG_FILENAME, 'r') - PREVIOUS_LOG = len(f.read()) - f.close() -else: - PREVIOUS_LOG = 0 - -log = logging.getLogger(__name__) -handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, maxBytes=2097152, backupCount=5) -log.addHandler(handler) -log.setLevel(logging.INFO) - - -class Autofetcher(object): - """ - Download name claims as they occur - """ - - def __init__(self, api): - self._api = api - self._checker = LoopingCall(self._check_for_new_claims) - self.best_block = None - - def start(self): - reactor.addSystemEventTrigger('before', 'shutdown', self.stop) - self._checker.start(5) - - def stop(self): - log.info("Stopping autofetcher") - self._checker.stop() - - def _check_for_new_claims(self): - block = self._api.get_best_blockhash() - if block != self.best_block: - log.info("Checking new block for name claims, block hash: %s" % block) - self.best_block = block - transactions = self._api.get_block({'blockhash': block})['tx'] - for t in transactions: - c = self._api.get_claims_for_tx({'txid': t}) - if len(c): - for i in c: - log.info("Downloading stream for claim txid: %s" % t) - self._api.get({'name': t, 'stream_info': json.loads(i['value'])}) - - -def run(api): - fetcher = Autofetcher(api) - fetcher.start() diff --git a/lbrynet/daemon/daemon_scripts/__init__.py 
b/lbrynet/daemon/daemon_scripts/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/lbrynet/daemon/daemon_scripts/migrateto025.py b/lbrynet/daemon/daemon_scripts/migrateto025.py deleted file mode 100644 index 0ab6ee2fd..000000000 --- a/lbrynet/daemon/daemon_scripts/migrateto025.py +++ /dev/null @@ -1,37 +0,0 @@ -from twisted.internet import defer - - -class migrator(object): - """ - Re-resolve lbry names to write missing data to blockchain.db and to cache the nametrie - """ - - def __init__(self, api): - self._api = api - - def start(self): - def _resolve_claims(claimtrie): - claims = [i for i in claimtrie if 'txid' in i.keys()] - r = defer.DeferredList( - [self._api._resolve_name(claim['name'], force_refresh=True) for claim in claims], - consumeErrors=True) - return r - - def _restart_lbry_files(): - def _restart_lbry_file(lbry_file): - return lbry_file.restore() - - lbry_files = self._api.lbry_file_manager.lbry_files - r = defer.DeferredList( - [_restart_lbry_file(lbry_file) for lbry_file in lbry_files if not lbry_file.txid], - consumeErrors=True) - return r - - d = self._api.session.wallet.get_nametrie() - d.addCallback(_resolve_claims) - d.addCallback(lambda _: _restart_lbry_files()) - - -def run(api): - refresher = migrator(api) - refresher.start() diff --git a/setup.py b/setup.py index 1a88a7431..98eb075dd 100644 --- a/setup.py +++ b/setup.py @@ -65,8 +65,5 @@ setup( packages=find_packages(base_dir), install_requires=requires, entry_points={'console_scripts': console_scripts}, - package_data={ - package_name: list(package_files('lbrynet/resources/ui')) - }, zip_safe=False, ) diff --git a/uri_handler/__init__.py b/uri_handler/__init__.py deleted file mode 100644 index e69de29bb..000000000 From 896f57a129fb1b469e7f1e63c12003301cee819f Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Wed, 29 Nov 2017 12:17:04 -0500 Subject: [PATCH 2/3] remove FAQ.md, update doc link in README.md --- FAQ.md | 86 ------------------------------------------------------- README.md | 2 +- 2 files changed, 1 insertion(+), 87 deletions(-) delete mode 100644 FAQ.md diff --git a/FAQ.md b/FAQ.md deleted file mode 100644 index 88916ef03..000000000 --- a/FAQ.md +++ /dev/null @@ -1,86 +0,0 @@ -#### Getting LBRY for development - -Q: How do I get lbry for command line? - -A: In order to run lbry from command line, you need more than the packaged app/deb. - -###### On OS X - -You can install LBRY command line by running `curl -sL https://rawgit.com/lbryio/lbry-setup/master/lbry_setup_osx.sh | sudo bash` in a terminal. This script will install lbrynet and its dependancies, as well as the app. - -###### On Linux - -On Ubuntu or Mint you can install the prerequisites and lbrynet by running - - sudo apt-get install libgmp3-dev build-essential python2.7 python2.7-dev python-pip git - git clone https://github.com/lbryio/lbry.git - cd lbry - sudo python setup.py install - -#### Using LBRY - -Q: How do I run lbry from command line? - -A: The command is `lbrynet-daemon` - -*********** - -Q: How do I stop lbry from the command line? - -A: You can ctrl-c or run `stop-lbrynet-daemon` - -*********** - -Q: How do I run lbry with lbrycrdd (the blockchain node application)? - -A: Start lbry with the --wallet flag set: `lbrynet-daemon --wallet=lbrycrd` - -Note: when you change the wallet it is persistant until you specify you want to use another wallet - lbryum - with the --wallet flag again. - -*********** - -Q: Where are all the behind the scenes files? 
- -A: On linux, the relevant directories are `~/.lbrynet`, `~/.lbrycrd`, and `~/.lbryum`, depending on which wallets you've used. On OS X, the folders of interest are `~/Library/Application Support/LBRY`, `~/.lbrycrd` and `~/.lbryum`, also depending on which wallets you've used. - -*********** - -Q: How can I see the log in the console? - -A: Run lbry with the --log-to-console flag set: `lbrynet-daemon --log-to-console` - -*********** - -Q: How do I specify a web-UI to use? - -A: If the files for the UI you'd like to use are storred locally on your computer, start lbry with the --ui flag: `lbrynet-daemon --ui=/full/path/to/ui/files/root/folder` - -Note, once set with the UI flag the given UI will be cached by lbry and used as the default going forward. Also, it will only successfully load a UI if it contains a conforming requirements.txt file to specify required lbrynet and lbryum versions. [Here](https://github.com/lbryio/lbry-web-ui/blob/master/dist/requirements.txt) is an example requirements.txt file. - -To reset your ui to pull from lbryio, or to try a UI still in development, run lbry with the --branch flag: `lbrynet=daemon --branch=master` - -*********** - -Q: How do I see the list of API functions I can call, and how do I call them? - -A: Here is an example script to get the documentation for the various API calls. To use any of the functions displayed, just provide any specified arguments in a dictionary. - -Note: the lbry api can only be used while either the app or lbrynet-daemon command line are running - - import sys - from jsonrpc.proxy import JSONRPCProxy - - try: - from lbrynet.conf import API_CONNECTION_STRING - except: - print "You don't have lbrynet installed!" - sys.exit(0) - - api = JSONRPCProxy.from_url(API_CONNECTION_STRING) - status = api.status() - if not status['is_running']: - print status - else: - for cmd in api.commands(): - print "%s:\n%s" % (cmd, api.help({'command': cmd})) - diff --git a/README.md b/README.md index bf26bc6ca..2798ae99e 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ By default, `lbrynet-daemon` will provide a JSON-RPC server at `http://localhost Our [quickstart guide](http://lbry.io/quickstart) provides a simple walkthrough and examples for learning. -The full API is documented [here](https://lbry.io/api). +The full API is documented [here](https://lbryio.github.io/lbry/cli). ## What is LBRY? 
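The FAQ deleted above contained one example worth keeping in mind: scripting against the daemon's JSON-RPC API, which the README now documents only through the hosted API reference. A minimal sketch, adapted from that removed example and assuming lbrynet is installed, the daemon is already running, and `lbrynet.conf` still exposes `API_CONNECTION_STRING` in your build (if it does not, point `JSONRPCProxy.from_url` at the daemon's address by hand):

    # Connect to a running lbrynet daemon and print the documentation for
    # every API command. Adapted from the example in the removed FAQ.md;
    # API_CONNECTION_STRING and jsonrpc.proxy availability are assumptions
    # carried over from that FAQ, not guarantees about the current code.
    import sys
    from jsonrpc.proxy import JSONRPCProxy

    try:
        from lbrynet.conf import API_CONNECTION_STRING
    except ImportError:
        print "lbrynet is not installed"
        sys.exit(1)

    api = JSONRPCProxy.from_url(API_CONNECTION_STRING)
    status = api.status()
    if not status['is_running']:
        print status
    else:
        for cmd in api.commands():
            print "%s:\n%s" % (cmd, api.help({'command': cmd}))

The API reference linked from the README covers the same set of commands this loop prints.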
From 552856ebaa7106decedeb6b1d1e4754b413aa665 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Wed, 29 Nov 2017 12:35:53 -0500 Subject: [PATCH 3/3] remove old release scripts --- build/changelog.py | 129 ---------------------------- build/release.py | 204 --------------------------------------------- 2 files changed, 333 deletions(-) delete mode 100644 build/changelog.py delete mode 100644 build/release.py diff --git a/build/changelog.py b/build/changelog.py deleted file mode 100644 index 322fc30a7..000000000 --- a/build/changelog.py +++ /dev/null @@ -1,129 +0,0 @@ -import datetime -import re - -CHANGELOG_START_RE = re.compile(r'^\#\# \[Unreleased\]') -CHANGELOG_END_RE = re.compile(r'^\#\# \[.*\] - \d{4}-\d{2}-\d{2}') -# if we come across a section header between two release section headers -# then we probably have an improperly formatted changelog -CHANGELOG_ERROR_RE = re.compile(r'^\#\# ') -SECTION_RE = re.compile(r'^\#\#\# (.*)$') -EMPTY_RE = re.compile(r'^\w*\*\w*$') -ENTRY_RE = re.compile(r'\* (.*)') -VALID_SECTIONS = ['Added', 'Changed', 'Deprecated', 'Removed', 'Fixed', 'Security'] - -# allocate some entries to cut-down on merge conflicts -TEMPLATE = """### Added - * - * - -### Changed - * - * - -### Fixed - * - * - -### Deprecated - * - * - -### Removed - * - * - -""" - - -class Changelog(object): - def __init__(self, path): - self.path = path - self.start = [] - self.unreleased = [] - self.rest = [] - self._parse() - - def _parse(self): - with open(self.path) as fp: - lines = fp.readlines() - - unreleased_start_found = False - unreleased_end_found = False - - for line in lines: - if not unreleased_start_found: - self.start.append(line) - if CHANGELOG_START_RE.search(line): - unreleased_start_found = True - continue - if unreleased_end_found: - self.rest.append(line) - continue - if CHANGELOG_END_RE.search(line): - self.rest.append(line) - unreleased_end_found = True - continue - if CHANGELOG_ERROR_RE.search(line): - raise Exception( - 'Failed to parse {}: {}'.format(self.path, 'unexpected section header found')) - self.unreleased.append(line) - - self.unreleased = self._normalize_section(self.unreleased) - - @staticmethod - def _normalize_section(lines): - """Parse a changelog entry and output a normalized form""" - sections = {} - current_section_name = None - current_section_contents = [] - for line in lines: - line = line.strip() - if not line or EMPTY_RE.match(line): - continue - match = SECTION_RE.match(line) - if match: - if current_section_contents: - sections[current_section_name] = current_section_contents - current_section_contents = [] - current_section_name = match.group(1) - if current_section_name not in VALID_SECTIONS: - raise ValueError("Section '{}' is not valid".format(current_section_name)) - continue - match = ENTRY_RE.match(line) - if match: - current_section_contents.append(match.group(1)) - continue - raise Exception('Something is wrong with line: {}'.format(line)) - if current_section_contents: - sections[current_section_name] = current_section_contents - - output = [] - for section in VALID_SECTIONS: - if section not in sections: - continue - output.append('### {}'.format(section)) - for entry in sections[section]: - output.append(' * {}'.format(entry)) - output.append("\n") - return output - - def get_unreleased(self): - return '\n'.join(self.unreleased) if self.unreleased else None - - def bump(self, version): - if not self.unreleased: - return - - today = datetime.datetime.today() - header = "## [{}] - {}\n\n".format(version, 
today.strftime('%Y-%m-%d')) - - changelog_data = ( - ''.join(self.start) + - TEMPLATE + - header + - '\n'.join(self.unreleased) + '\n\n' - + ''.join(self.rest) - ) - - with open(self.path, 'w') as fp: - fp.write(changelog_data) diff --git a/build/release.py b/build/release.py deleted file mode 100644 index 0f736cc14..000000000 --- a/build/release.py +++ /dev/null @@ -1,204 +0,0 @@ -"""Bump version and create Github release - -This script should be run locally, not on a build server. -""" -import argparse -import contextlib -import os -import re -import subprocess -import sys - -import git -import github - -import changelog - -ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - - -def main(): - bumpversion_parts = get_bumpversion_parts() - - parser = argparse.ArgumentParser() - parser.add_argument("part", choices=bumpversion_parts, help="part of version to bump") - parser.add_argument("--skip-sanity-checks", action="store_true") - parser.add_argument("--skip-push", action="store_true") - parser.add_argument("--dry-run", action="store_true") - parser.add_argument("--confirm", action="store_true") - args = parser.parse_args() - - if args.dry_run: - print "DRY RUN. Nothing will be committed/pushed." - - repo = Repo('lbry', args.part, ROOT) - branch = 'master' - - print 'Current version: {}'.format(repo.current_version) - print 'New version: {}'.format(repo.new_version) - - if not args.confirm and not confirm(): - print "Aborting" - return 1 - - if not args.skip_sanity_checks: - run_sanity_checks(repo, branch) - repo.assert_new_tag_is_absent() - - is_rc = re.search('\drc\d+$', repo.new_version) is not None - # only have a release message for real releases, not for RCs - release_msg = None if is_rc else repo.get_unreleased_changelog() - if release_msg is None: - release_msg = '' - - if args.dry_run: - print "rc: " + ("yes" if is_rc else "no") - print "release message: \n" + (release_msg if not is_rc else " NO MESSAGE FOR RCs") - return - - gh_token = get_gh_token() - auth = github.Github(gh_token) - github_repo = auth.get_repo('lbryio/lbry') - - if not is_rc: - repo.bump_changelog() - repo.bumpversion() - - new_tag = repo.get_new_tag() - github_repo.create_git_release(new_tag, new_tag, release_msg, draft=True, prerelease=is_rc) - - if args.skip_push: - print ( - 'Skipping push; you will have to reset and delete tags if ' - 'you want to run this script again.' 
- ) - else: - repo.git_repo.git.push(follow_tags=True, recurse_submodules='check') - - -class Repo(object): - def __init__(self, name, part, directory): - self.name = name - self.part = part - if not self.part: - raise Exception('Part required') - self.directory = directory - self.git_repo = git.Repo(self.directory) - self._bumped = False - - self.current_version = self._get_current_version() - self.new_version = self._get_new_version() - self._changelog = changelog.Changelog(os.path.join(self.directory, 'CHANGELOG.md')) - - def get_new_tag(self): - return 'v' + self.new_version - - def get_unreleased_changelog(self): - return self._changelog.get_unreleased() - - def bump_changelog(self): - self._changelog.bump(self.new_version) - with pushd(self.directory): - self.git_repo.git.add(os.path.basename(self._changelog.path)) - - def _get_current_version(self): - with pushd(self.directory): - output = subprocess.check_output( - ['bumpversion', '--dry-run', '--list', '--allow-dirty', self.part]) - return re.search('^current_version=(.*)$', output, re.M).group(1) - - def _get_new_version(self): - with pushd(self.directory): - output = subprocess.check_output( - ['bumpversion', '--dry-run', '--list', '--allow-dirty', self.part]) - return re.search('^new_version=(.*)$', output, re.M).group(1) - - def bumpversion(self): - if self._bumped: - raise Exception('Cowardly refusing to bump a repo twice') - with pushd(self.directory): - subprocess.check_call(['bumpversion', '--allow-dirty', self.part]) - self._bumped = True - - def assert_new_tag_is_absent(self): - new_tag = self.get_new_tag() - tags = self.git_repo.git.tag() - if new_tag in tags.split('\n'): - raise Exception('Tag {} is already present in repo {}.'.format(new_tag, self.name)) - - def is_behind(self, branch): - self.git_repo.remotes.origin.fetch() - rev_list = '{branch}...origin/{branch}'.format(branch=branch) - commits_behind = self.git_repo.git.rev_list(rev_list, right_only=True, count=True) - commits_behind = int(commits_behind) - return commits_behind > 0 - - -def get_bumpversion_parts(): - with pushd(ROOT): - output = subprocess.check_output([ - 'bumpversion', '--dry-run', '--list', '--allow-dirty', 'fake-part', - ]) - parse_line = re.search('^parse=(.*)$', output, re.M).group(1) - return tuple(re.findall('<([^>]+)>', parse_line)) - - -def get_gh_token(): - if 'GH_TOKEN' in os.environ: - return os.environ['GH_TOKEN'] - else: - print """ -Please enter your personal access token. If you don't have one -See https://github.com/lbryio/lbry-app/wiki/Release-Script#generate-a-personal-access-token -for instructions on how to generate one. - -You can also set the GH_TOKEN environment variable to avoid seeing this message -in the future""" - return raw_input('token: ').strip() - - -def confirm(): - try: - return raw_input('Is this what you want? 
[y/N] ').strip().lower() == 'y' - except KeyboardInterrupt: - return False - - -def run_sanity_checks(repo, branch): - if repo.git_repo.is_dirty(): - print 'Cowardly refusing to release a dirty repo' - sys.exit(1) - if repo.git_repo.active_branch.name != branch: - print 'Cowardly refusing to release when not on the {} branch'.format(branch) - sys.exit(1) - if repo.is_behind(branch): - print 'Cowardly refusing to release when behind origin' - sys.exit(1) - if not is_custom_bumpversion_version(): - print ( - 'Install LBRY\'s fork of bumpversion: ' - 'pip install -U git+https://github.com/lbryio/bumpversion.git' - ) - sys.exit(1) - - -def is_custom_bumpversion_version(): - try: - output = subprocess.check_output(['bumpversion', '-v'], stderr=subprocess.STDOUT).strip() - if output == 'bumpversion 0.5.4-lbry': - return True - except (subprocess.CalledProcessError, OSError): - pass - return False - - -@contextlib.contextmanager -def pushd(new_dir): - previous_dir = os.getcwd() - os.chdir(new_dir) - yield - os.chdir(previous_dir) - - -if __name__ == '__main__': - sys.exit(main())
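The removed release tooling boiled down to a small flow: read the Unreleased section of CHANGELOG.md, use it as the GitHub release notes, roll it under a dated version header, then run bumpversion, create a draft GitHub release, and push the tag. A minimal sketch of the changelog half of that flow, assuming the `Changelog` class from the removed build/changelog.py is kept somewhere importable; the version string is only a placeholder:

    # Read the pending notes and roll them into a dated release section,
    # the way the removed build/release.py drove build/changelog.py.
    # Assumes changelog.py is on the import path; '0.0.0' is a placeholder.
    from changelog import Changelog

    cl = Changelog('CHANGELOG.md')         # parses the Unreleased section
    release_notes = cl.get_unreleased()    # text release.py used as the release body
    if release_notes:
        cl.bump('0.0.0')                   # writes the dated section back to CHANGELOG.md
        print release_notes

release.py then called bumpversion, created a draft GitHub release with those notes (marking release candidates as prereleases and giving them no release message), and pushed the commit and tags unless --skip-push was set.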