Flake8 CI fixes (#451)
* Pep8 fixes (repeated across many of the squashed commits)
* tox Pep8 compliance
* Excluded external tools folder from flake8 tests
* Added Flake8 exclusions
* Corrected type
* Pep 8 compliance
* Revert changes
* Revert changes to kivy/__init.py
* Revert changes to toolchain
* Add files exclusions to tox.ini
* Added exclusions for alias recipes
* Remove dead code
* Added py extension to recipes
* Removed recipe build skip
* Improves recipe matching

The previous expression was matching all three of the following lines of a
`git diff --name-only` output:

```
recipes/hostlibffi/__init__.py
recipes/hostpython.py
recipes/hostpython2/__init__.py
```

This resulted in a bug: when later splitting with `recipe = file_path.split('/')[1]`,
the `recipes/hostpython.py` string would be returned including the `\n` newline
char, see:

```
>>> 'recipes/hostpython.py\n'.split('/')[1]
'hostpython.py\n'
>>> 'recipes/hostlibffi/__init__.py\n'.split('/')[1]
'hostlibffi'
>>>
```

Co-authored-by: Andre Miras <AndreMiras@users.noreply.github.com>
parent 85f849e187
commit 64bd692632
46 changed files with 328 additions and 241 deletions
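To make the recipe-matching fix concrete, here is a minimal sketch of the detection helper, assembled from the message above and the two hunks that follow; it is an illustration rather than the complete CI script (the module docstring and the rebuild/`subprocess` logic are omitted):

```
from fnmatch import fnmatch

import sh


def modified_recipes(branch='origin/master'):
    """Return the names of recipes whose __init__.py changed vs. `branch`."""
    # `git diff --name-only` via the `sh` library; each yielded line keeps its
    # trailing newline, hence the "\n" at the end of the fnmatch pattern.
    git_diff = sh.contrib.git.diff('--name-only', branch)
    recipes = set()
    for file_path in git_diff:
        # Only recipes/<name>/__init__.py counts as a recipe change, so a path
        # such as recipes/hostpython.py no longer leaks a bogus
        # 'hostpython.py\n' entry into the result.
        if fnmatch(file_path, "recipes/*/__init__.py\n"):
            recipe = file_path.split('/')[1]
            recipes.add(recipe)
    return recipes
```

For the three sample paths quoted in the message, this returns `{'hostlibffi', 'hostpython2'}`, whereas the old `'recipes/' in file_path` test would also have yielded the stray `'hostpython.py\n'` entry.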
@@ -1,6 +1,7 @@
 import sh
-import shutil
 import subprocess
+from fnmatch import fnmatch


 def modified_recipes(branch='origin/master'):
     """
@@ -14,11 +15,12 @@ def modified_recipes(branch='origin/master'):
     git_diff = sh.contrib.git.diff('--name-only', branch)
     recipes = set()
     for file_path in git_diff:
-        if 'recipes/' in file_path:
+        if fnmatch(file_path, "recipes/*/__init__.py\n"):
             recipe = file_path.split('/')[1]
             recipes.add(recipe)
     return recipes


 if __name__ == "__main__":
     updated_recipes = " ".join(modified_recipes())
     if updated_recipes != '':
@@ -10,4 +10,3 @@ class AudiostreamRecipe(CythonRecipe):


 recipe = AudiostreamRecipe()

@@ -1,7 +1,9 @@
 # pure-python package, this can be removed when we'll support any python package
 from toolchain import PythonRecipe, shprint
 from os.path import join
-import sh, os
+import sh
+import os


 class ClickRecipe(PythonRecipe):
     version = "master"
@@ -20,5 +22,5 @@ class ClickRecipe(PythonRecipe):
         shprint(cmd, "-i", "", "s/setuptools/distutils.core/g", "./setup.py", _env=build_env)
         shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)

-recipe = ClickRecipe()

+recipe = ClickRecipe()

@@ -1,5 +1,6 @@
 from toolchain import CythonRecipe


 class CurlyRecipe(CythonRecipe):
     version = "master"
     url = "https://github.com/tito/curly/archive/{version}.zip"

@@ -3,10 +3,7 @@ Author: Lawrence Du, Lukasz Mach
 E-mail: larrydu88@gmail.com, maho@pagema.net
 """

-from toolchain import CythonRecipe,shprint
+from toolchain import CythonRecipe
-import os
-from os.path import join
-import sh


 class CymunkRecipe(CythonRecipe):

@@ -1,6 +1,8 @@
 from toolchain import PythonRecipe, shprint
 from os.path import join
-import sh, os
+import sh
+import os


 class DistributeRecipe(PythonRecipe):
     version = "0.7.3"
@@ -18,4 +20,5 @@ class DistributeRecipe(PythonRecipe):
         build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
         shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)


 recipe = DistributeRecipe()

@@ -1,5 +1,5 @@
 from toolchain import Recipe, shprint
-from os.path import join, exists
+from os.path import join
 import sh


@@ -1,6 +1,5 @@
-from toolchain import CythonRecipe, shprint
+from toolchain import CythonRecipe
 from os.path import join
-import sh


 class FFPyplayerRecipe(CythonRecipe):
@@ -34,4 +33,3 @@ class FFPyplayerRecipe(CythonRecipe):


 recipe = FFPyplayerRecipe()

@@ -1,12 +1,14 @@
 # pure-python package, this can be removed when we'll support any python package
 from toolchain import PythonRecipe, shprint
 from os.path import join
-import sh, os
+import sh
+import os


 class FlaskRecipe(PythonRecipe):
     version = "master"
     url = "https://github.com/mitsuhiko/flask/archive/{version}.zip"
-    depends = ["python","jinja2","werkzeug","itsdangerous","click"]
+    depends = ["python", "jinja2", "werkzeug", "itsdangerous", "click"]

     def install(self):
         arch = list(self.filtered_archs)[0]
@@ -20,5 +22,5 @@ class FlaskRecipe(PythonRecipe):
         shprint(cmd, "-i", "", "s/setuptools/distutils.core/g", "./setup.py", _env=build_env)
         shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)

-recipe = FlaskRecipe()

+recipe = FlaskRecipe()

@@ -32,4 +32,3 @@ class FreetypeRecipe(Recipe):


 recipe = FreetypeRecipe()

@@ -1,8 +1,7 @@
 from toolchain import Recipe, shprint
-from os.path import join, exists
+from os.path import join
 import sh
 import os
-import fnmatch
 import shutil


@@ -31,4 +30,5 @@ class HostSetuptools(Recipe):
         os.remove('easy-install.pth')
         shutil.rmtree('EGG-INFO')


 recipe = HostSetuptools()

@@ -5,6 +5,7 @@ import logging

 logger = logging.getLogger(__name__)


 class LibffiRecipe(Recipe):
     version = "3.2.1"
     url = "ftp://sourceware.org/pub/libffi/libffi-{version}.tar.gz"
@@ -29,7 +30,7 @@ class LibffiRecipe(Recipe):
             return
         # necessary as it doesn't compile with XCode 6.0. If we use 5.1.1, the
         # compiler for i386 is not working.
-        #shprint(sh.sed,
+        # shprint(sh.sed,
         # "-i.bak",
         # "s/-miphoneos-version-min=5.1.1/-miphoneos-version-min=6.0/g",
         # "generate-darwin-source-and-headers.py")
@@ -63,4 +64,5 @@ class LibffiRecipe(Recipe):
     def postbuild_arch(self, arch):
         pass


 recipe = LibffiRecipe()

@@ -5,6 +5,7 @@ import logging

 logger = logging.getLogger(__name__)


 class HostpythonAliasRecipe(Recipe):
     is_alias = True

@@ -24,4 +25,5 @@ class HostpythonAliasRecipe(Recipe):
         if hostpython:
             self.depends = [hostpython]


 recipe = HostpythonAliasRecipe()

@@ -3,8 +3,10 @@ Stub functions for _scproxy on OsX
 No proxy is supported yet.
 '''


 def _get_proxy_settings():
     return {'exclude_simple': 1}


 def _get_proxies():
     return {}

@@ -1,13 +1,13 @@
 from distutils.core import setup, Extension
-import os

 setup(name='ios',
       version='1.1',
       ext_modules=[
-          Extension(
+          Extension('ios',
-              'ios', ['ios.c', 'ios_utils.m', 'ios_mail.m', 'ios_browser.m',
+                    ['ios.c', 'ios_utils.m', 'ios_mail.m', 'ios_browser.m',
                      'ios_filechooser.m'],
-              libraries=[ ],
+                    libraries=[],
                     library_dirs=[],
           )
       ]

@@ -1,7 +1,9 @@
 # pure-python package, this can be removed when we'll support any python package
 from toolchain import PythonRecipe, shprint
 from os.path import join
-import sh, os
+import sh
+import os


 class ItsDangerousRecipe(PythonRecipe):
     version = "master"
@@ -20,5 +22,5 @@ class ItsDangerousRecipe(PythonRecipe):
         shprint(cmd, "-i", "", "s/setuptools/distutils.core/g", "./setup.py", _env=build_env)
         shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)

-recipe = ItsDangerousRecipe()

+recipe = ItsDangerousRecipe()

@@ -1,12 +1,14 @@
 # pure-python package, this can be removed when we'll support any python package
 from toolchain import PythonRecipe, shprint
 from os.path import join
-import sh, os
+import sh
+import os


 class Jinja2Recipe(PythonRecipe):
     version = "master"
     url = "https://github.com/mitsuhiko/jinja2/archive/{version}.zip"
-    depends = ["python","markupsafe"]
+    depends = ["python", "markupsafe"]

     def install(self):
         arch = list(self.filtered_archs)[0]
@@ -20,5 +22,5 @@ class Jinja2Recipe(PythonRecipe):
         shprint(cmd, "-i", "", "s/setuptools/distutils.core/g", "./setup.py", _env=build_env)
         shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)

-recipe = Jinja2Recipe()

+recipe = Jinja2Recipe()

@@ -6,7 +6,7 @@ E-mail: larrydu88@gmail.com
 from toolchain import CythonRecipe, shprint
 import sh
 from os.path import join
-from os import environ, chdir
+from os import chdir
 import logging

 logger = logging.getLogger(__name__)
@@ -16,46 +16,41 @@ class KiventCoreRecipe(CythonRecipe):
     version = 'master'
     url = 'https://github.com/kivy/kivent/archive/{version}.zip'
     name = 'kivent_core'
-    depends = ['libffi','kivy'] #note: unsure if libffi is necessary here
+    depends = ['libffi', 'kivy'] # note: unsure if libffi is necessary here
-    pre_build_ext=False
+    pre_build_ext = False
     subbuilddir = False
     cythonize = True
-    pbx_frameworks = ["OpenGLES"] #note: This line may be unnecessary
+    pbx_frameworks = ["OpenGLES"] # note: This line may be unnecessary


     def get_recipe_env(self, arch):
-        env = super(KiventCoreRecipe,self).get_recipe_env(arch)
+        env = super(KiventCoreRecipe, self).get_recipe_env(arch)
         env['CYTHONPATH'] = self.get_recipe(
             'kivy', self.ctx).get_build_dir(arch.arch)
         return env

-    def get_build_dir(self,arch, sub=False):
+    def get_build_dir(self, arch, sub=False):
         """
         Call this to get the correct build_dir, where setup.py is located which is
         actually under modules/core/setup.py
         """
         builddir = super(KiventCoreRecipe, self).get_build_dir(str(arch))
         if sub or self.subbuilddir:
-            core_build_dir = join (builddir, 'modules', 'core')
+            core_build_dir = join(builddir, 'modules', 'core')
             logger.info("Core build directory is located at {}".format(core_build_dir))
             return core_build_dir
         else:
             logger.info("Building in {}".format(builddir))
             return builddir


     def build_arch(self, arch):
         """
         Override build.arch to avoid calling setup.py here (Call it in
         install() instead).
         """

         self.subbuildir = True
         self.cythonize_build()
         self.biglink()
-        self.subbuilddir=False
+        self.subbuilddir = False


     def install(self):
         """
@@ -73,35 +68,35 @@ class KiventCoreRecipe(CythonRecipe):
         """
         arch = list(self.filtered_archs)[0]

-        build_dir = self.get_build_dir(arch.arch,sub=True)
+        build_dir = self.get_build_dir(arch.arch, sub=True)
-        logger.info("Building kivent_core {} in {}".format(arch.arch,build_dir))
+        logger.info("Building kivent_core {} in {}".format(arch.arch, build_dir))
         chdir(build_dir)
         hostpython = sh.Command(self.ctx.hostpython)

-        #Get the appropriate environment for this recipe (including CYTHONPATH)
+        # Get the appropriate environment for this recipe (including CYTHONPATH)
-        #build_env = arch.get_env()
+        # build_env = arch.get_env()
         build_env = self.get_recipe_env(arch)

-        dest_dir = join (self.ctx.dist_dir, "root", "python")
+        dest_dir = join(self.ctx.dist_dir, "root", "python")
         build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')

-        #Add Architecture specific kivy path for 'import kivy' to PYTHONPATH
+        # Add Architecture specific kivy path for 'import kivy' to PYTHONPATH
         arch_kivy_path = self.get_recipe('kivy', self.ctx).get_build_dir(arch.arch)
-        build_env['PYTHONPATH'] = join( build_env['PYTHONPATH'],':',arch_kivy_path)
+        build_env['PYTHONPATH'] = join(build_env['PYTHONPATH'], ':', arch_kivy_path)

-        #Make sure you call kivent_core/modules/core/setup.py
+        # Make sure you call kivent_core/modules/core/setup.py
-        subdir_path = self.get_build_dir(str(arch),sub=True)
+        subdir_path = self.get_build_dir(str(arch), sub=True)
-        setup_path = join(subdir_path,"setup.py")
+        setup_path = join(subdir_path, "setup.py")

-        #Print out directories for sanity check
+        # Print out directories for sanity check
         logger.info("ENVS", build_env)
-        logger.info("ROOT",self.ctx.root_dir)
+        logger.info("ROOT", self.ctx.root_dir)
-        logger.info("BUILD",self.ctx.build_dir)
+        logger.info("BUILD", self.ctx.build_dir)
         logger.info("INCLUDE", self.ctx.include_dir)
         logger.info("DISTDIR", self.ctx.dist_dir)
-        logger.info("ARCH KIVY LOC",self.get_recipe('kivy', self.ctx).get_build_dir(arch.arch))
+        logger.info("ARCH KIVY LOC", self.get_recipe('kivy', self.ctx).get_build_dir(arch.arch))

         shprint(hostpython, setup_path, "build_ext", "install", _env=build_env)


 recipe = KiventCoreRecipe()

@@ -1,7 +1,6 @@
 from toolchain import Recipe, shprint
-from os.path import join, exists
+from os.path import join
 import sh
-import os


 class CurlRecipe(Recipe):
@@ -11,7 +10,6 @@ class CurlRecipe(Recipe):
     include_dir = "include"
     depends = ["openssl"]


     def build_arch(self, arch):
         build_env = arch.get_env()
         configure = sh.Command(join(self.build_dir, "configure"))
@@ -27,6 +25,5 @@ class CurlRecipe(Recipe):
         shprint(sh.make, "clean")
         shprint(sh.make, self.ctx.concurrent_make)


 recipe = CurlRecipe()

@@ -1,7 +1,6 @@
 from toolchain import Recipe, shprint
-from os.path import join, exists
+from os.path import join
 import sh
-import os


 class JpegRecipe(Recipe):
@@ -16,7 +15,6 @@ class JpegRecipe(Recipe):
     ]
     include_per_arch = True


     def build_arch(self, arch):
         build_env = arch.get_env()
         configure = sh.Command(join(self.build_dir, "configure"))
@@ -31,6 +29,5 @@ class JpegRecipe(Recipe):
         shprint(sh.make, "clean")
         shprint(sh.make, self.ctx.concurrent_make)


 recipe = JpegRecipe()

@@ -3,6 +3,7 @@ from toolchain import Recipe, shprint
 from os.path import join
 import sh


 class PngRecipe(Recipe):
     version = '1.6.26'
     url = 'http://downloads.sourceforge.net/sourceforge/libpng/libpng-{version}.tar.gz'
@@ -23,4 +24,5 @@ class PngRecipe(Recipe):
         shprint(sh.make, "clean")
         shprint(sh.make, self.ctx.concurrent_make, _env=build_env)


 recipe = PngRecipe()

@@ -1,7 +1,7 @@
-from toolchain import Recipe,shprint
+from toolchain import Recipe, shprint
 from os.path import join
 import sh
-import os

 class LibZBarRecipe(Recipe):

@@ -28,11 +28,12 @@ class LibZBarRecipe(Recipe):
         super(LibZBarRecipe, self).build_arch(arch)
         build_env = arch.get_env()
         build_env["CFLAGS"] = " ".join([
-            "-I{}".format(join(self.ctx.dist_dir, "build","libiconv",arch.arch)) +
+            "-I{}".format(join(self.ctx.dist_dir, "build", "libiconv", arch.arch)) +
-            " -arch {}".format(arch.arch),build_env['CFLAGS']
+            " -arch {}".format(arch.arch), build_env['CFLAGS']
         ])
-        shprint(sh.Command('autoreconf') ,'-vif')
+        shprint(sh.Command('autoreconf'), '-vif')
-        shprint(sh.Command('./configure'),
+        shprint(
+            sh.Command('./configure'),
             "CC={}".format(build_env["CC"]),
             "LD={}".format(build_env["LD"]),
             "CFLAGS={}".format(build_env["CFLAGS"]),
@@ -53,4 +54,5 @@ class LibZBarRecipe(Recipe):
         shprint(sh.make, 'clean')
         shprint(sh.make, _env=build_env)


 recipe = LibZBarRecipe()

@@ -1,7 +1,9 @@
 # pure-python package, this can be removed when we'll support any python package
 from toolchain import PythonRecipe, shprint
 from os.path import join
-import sh, os
+import sh
+import os


 class MarkupSafeRecipe(PythonRecipe):
     version = "master"
@@ -20,9 +22,8 @@ class MarkupSafeRecipe(PythonRecipe):
         shprint(cmd, "-i", "", "s/,.*Feature//g", "./setup.py", _env=build_env)
         shprint(cmd, "-i", "", "s/setuptools/distutils.core/g", "./setup.py", _env=build_env)
         shprint(cmd, "-i", "", "/^speedups = Feature/,/^)$/s/.*//g", "./setup.py", _env=build_env)
-        shprint(cmd, "-i", "", "s/features\['speedups'\].*=.*speedups/pass/g", "./setup.py", _env=build_env)
+        shprint(cmd, "-i", "", "s/features\['speedups'\].*=.*speedups/pass/g", "./setup.py", _env=build_env) # noqa: W605
         shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)


 recipe = MarkupSafeRecipe()

@@ -1,6 +1,5 @@
-from toolchain import CythonRecipe, shprint
+from toolchain import CythonRecipe
 from os.path import join
-from os import chdir, listdir
 import sh
 import shutil

@@ -32,4 +32,5 @@ class OpensslRecipe(Recipe):
         shprint(sh.make, "clean")
         shprint(sh.make, self.ctx.concurrent_make, "build_libs")


 recipe = OpensslRecipe()

@@ -11,6 +11,5 @@ class PhotoRecipe(CythonRecipe):
     def install(self):
         self.install_python_package(name="photolibrary.so", is_dir=False)


 recipe = PhotoRecipe()

@@ -8,7 +8,7 @@ import fnmatch
 class PillowRecipe(Recipe):
     version = "2.8.2"
     url = "https://pypi.python.org/packages/source/P/Pillow/Pillow-{version}.tar.gz"
-    #url = "https://github.com/python-pillow/Pillow/archive/{version}.tar.gz"
+    # url = "https://github.com/python-pillow/Pillow/archive/{version}.tar.gz"
     library = "libpil.a"
     depends = ["hostpython", "host_setuptools", "pkgresources", "freetype", "libjpeg", "python", "ios"]
     pbx_libraries = ["libz", "libbz2"]
@@ -33,9 +33,9 @@ class PillowRecipe(Recipe):
     def build_arch(self, arch):
         self.apply_patch('pil_setup.patch')
         build_env = self.get_pil_env(arch)
-        #build_dir = self.get_build_dir(arch.arch)
+        # build_dir = self.get_build_dir(arch.arch)
         hostpython = sh.Command(self.ctx.hostpython)
-        #build_env["PYTHONHOME"] = hostpython
+        # build_env["PYTHONHOME"] = hostpython
         # first try to generate .h
         shprint(hostpython, "setup.py", "build_ext", "-g",
                 _env=build_env)
@@ -61,5 +61,5 @@ class PillowRecipe(Recipe):
         cmd = sh.Command(join(self.ctx.root_dir, "tools", "biglink"))
         shprint(cmd, join(self.build_dir, "libpil.a"), *dirs)

-recipe = PillowRecipe()

+recipe = PillowRecipe()

@@ -56,5 +56,5 @@ class PillowRecipe(Recipe):
         cmd = sh.Command(join(self.ctx.root_dir, "tools", "biglink"))
         shprint(cmd, join(self.build_dir, "libpillow.a"), *dirs)

-recipe = PillowRecipe()

+recipe = PillowRecipe()

@@ -1,4 +1,4 @@
-from toolchain import Recipe, shprint
+from toolchain import Recipe
 from os.path import join
 import sh

@@ -1,4 +1,4 @@
-"""
+""" # noqa E902
 Package resource API
 --------------------

@@ -10,7 +10,7 @@ names being passed into the API.

 The package resource API is designed to work with normal filesystem packages,
 .egg files, and unpacked .egg files. It can also work in a limited way with
-.zip files and with custom PEP 302 loaders that support the ``get_data()``
+.zip files and with custom PEP 302 loaders that support the ``get_data()``.
 method.
 """

@@ -41,8 +41,7 @@ PY2 = not PY3

 if PY3:
     from urllib.parse import urlparse, urlunparse
-if PY2:
+else:
     from urlparse import urlparse, urlunparse

 if PY3:
@@ -76,10 +75,12 @@ except ImportError:

 _state_vars = {}


 def _declare_state(vartype, **kw):
     globals().update(kw)
     _state_vars.update(dict.fromkeys(kw, vartype))


 def __getstate__():
     state = {}
     g = globals()
@@ -87,25 +88,31 @@ def __getstate__():
         state[k] = g['_sget_'+v](g[k])
     return state


 def __setstate__(state):
     g = globals()
     for k, v in state.items():
         g['_sset_'+_state_vars[k]](k, g[k], v)
     return state


 def _sget_dict(val):
     return val.copy()


 def _sset_dict(key, ob, state):
     ob.clear()
     ob.update(state)


 def _sget_object(val):
     return val.__getstate__()


 def _sset_object(key, ob, state):
     ob.__setstate__(state)


 _sget_none = _sset_none = lambda *args: None

@@ -132,6 +139,7 @@ def get_supported_platform():
         pass
     return plat


 __all__ = [
     # Basic resource access and distribution/entry point discovery
     'require', 'run_script', 'get_provider', 'get_distribution',
@@ -175,19 +183,25 @@ __all__ = [
     'run_main', 'AvailableDistributions',
 ]


 class ResolutionError(Exception):
     """Abstract base for dependency resolution errors"""
     def __repr__(self):
         return self.__class__.__name__+repr(self.args)


 class VersionConflict(ResolutionError):
     """An already-installed version conflicts with the requested version"""


 class DistributionNotFound(ResolutionError):
     """A requested distribution was not found"""


 class UnknownExtra(ResolutionError):
     """Distribution doesn't have an "extra feature" of the given name"""


 _provider_factories = {}

 PY_MAJOR = sys.version[:3]
@@ -197,6 +211,7 @@ SOURCE_DIST = 1
 CHECKOUT_DIST = 0
 DEVELOP_DIST = -1


 def register_loader_type(loader_type, provider_factory):
     """Register `provider_factory` to make providers for `loader_type`

@@ -206,6 +221,7 @@ def register_loader_type(loader_type, provider_factory):
     """
     _provider_factories[loader_type] = provider_factory


 def get_provider(moduleOrReq):
     """Return an IResourceProvider for the named module or requirement"""
     if isinstance(moduleOrReq, Requirement):
@@ -218,6 +234,7 @@ def get_provider(moduleOrReq):
     loader = getattr(module, '__loader__', None)
     return _find_adapter(_provider_factories, loader)(module)


 def _macosx_vers(_cache=[]):
     if not _cache:
         version = platform.mac_ver()[0]
@@ -233,9 +250,11 @@ def _macosx_vers(_cache=[]):
         _cache.append(version.split('.'))
     return _cache[0]


 def _macosx_arch(machine):
     return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)


 def get_build_platform():
     """Return this platform's string for platform-specific distributions

@@ -261,6 +280,7 @@ def get_build_platform():
         pass
     return plat


 macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
 darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
 # XXX backward compat
@@ -274,7 +294,7 @@ def compatible_platforms(provided, required):

     XXX Needs compatibility checks for Linux and other unixy OSes.
     """
-    if provided is None or required is None or provided==required:
+    if provided is None or required is None or provided == required:
         # easy case
         return True

@@ -321,9 +341,11 @@ def run_script(dist_spec, script_name):
     ns['__name__'] = name
     require(dist_spec)[0].run_script(script_name, ns)


 # backward compatibility
 run_main = run_script


 def get_distribution(dist):
     """Return a current distribution object for a Requirement or string"""
     if isinstance(dist, string_types):
@@ -334,14 +356,17 @@ def get_distribution(dist):
         raise TypeError("Expected string, Requirement, or Distribution", dist)
     return dist


 def load_entry_point(dist, group, name):
     """Return `name` entry point of `group` for `dist` or raise ImportError"""
     return get_distribution(dist).load_entry_point(group, name)


 def get_entry_map(dist, group=None):
     """Return the entry point map for `group`, or the full entry map"""
     return get_distribution(dist).get_entry_map(group)


 def get_entry_info(dist, group, name):
     """Return the EntryPoint object for `group`+`name`, or ``None``"""
     return get_distribution(dist).get_entry_info(group, name)
@@ -530,7 +555,7 @@ class WorkingSet(object):

             for key in self.entry_keys[item]:
                 if key not in seen:
-                    seen[key]=1
+                    seen[key] = 1
                     yield self.by_key[key]

     def add(self, dist, entry=None, insert=True, replace=False):
@@ -550,8 +575,8 @@ class WorkingSet(object):

         if entry is None:
             entry = dist.location
-        keys = self.entry_keys.setdefault(entry,[])
+        keys = self.entry_keys.setdefault(entry, [])
-        keys2 = self.entry_keys.setdefault(dist.location,[])
+        keys2 = self.entry_keys.setdefault(dist.location, [])
         if not replace and dist.key in self.by_key:
             # ignore hidden distros
             return
@@ -617,10 +642,6 @@ class WorkingSet(object):
                        ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(req, ws, installer)
                    if dist is None:
-                        #msg = ("The '%s' distribution was not found on this "
-                        # "system, and is required by this application.")
-                        #raise DistributionNotFound(msg % req)

                        # unfortunately, zc.buildout uses a str(err)
                        # to get the name of the distribution here..
                        raise DistributionNotFound(req)
@@ -805,7 +826,7 @@ class Environment(object):
         is returned.
         """
         return (self.python is None or dist.py_version is None
-            or dist.py_version==self.python) \
+            or dist.py_version == self.python) \
             and compatible_platforms(dist.platform, self.platform)

     def remove(self, dist):
@@ -1012,7 +1033,7 @@ variable to point to an accessible directory.
         target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
         try:
             _bypass_ensure_directory(target_path)
-        except:
+        except Exception:
             self.extraction_error()

         self._warn_unsafe_extraction_path(extract_path)
@@ -1103,6 +1124,7 @@ variable to point to an accessible directory.
         """
         # XXX


 def get_default_cache():
     """Determine the default cache location

@@ -1115,7 +1137,7 @@ def get_default_cache():
     except KeyError:
         pass

-    if os.name!='nt':
+    if os.name != 'nt':
         return os.path.expanduser('~/.python-eggs')

     # XXX this may be locale-specific!
@@ -1124,7 +1146,7 @@ def get_default_cache():
         # best option, should be locale-safe
         (('APPDATA',), None),
         (('USERPROFILE',), app_data),
-        (('HOMEDRIVE','HOMEPATH'), app_data),
+        (('HOMEDRIVE', 'HOMEPATH'), app_data),
         (('HOMEPATH',), app_data),
         (('HOME',), None),
         # 95/98/ME
@@ -1147,6 +1169,7 @@ def get_default_cache():
             "Please set the PYTHON_EGG_CACHE enviroment variable"
         )


 def safe_name(name):
     """Convert an arbitrary string to a standard distribution name

@@ -1161,7 +1184,7 @@ def safe_version(version):
     Spaces become dots, and all other non-alphanumeric characters become
     dashes, with runs of multiple dashes condensed to a single dash.
     """
-    version = version.replace(' ','.')
+    version = version.replace(' ', '.')
     return re.sub('[^A-Za-z0-9.]+', '-', version)

@@ -1179,7 +1202,7 @@ def to_filename(name):

     Any '-' characters are currently replaced with '_'.
     """
-    return name.replace('-','_')
+    return name.replace('-', '_')


 class MarkerEvaluation(object):
@@ -1325,11 +1348,12 @@ class MarkerEvaluation(object):
     if 'parser' not in globals():
         # Fall back to less-complete _markerlib implementation if 'parser' module
         # is not available.
-        evaluate_marker = _markerlib_evaluate
+        evaluate_marker = _markerlib_evaluate # noqa: F811

     @classmethod
     def interpret(cls, nodelist):
-        while len(nodelist)==2: nodelist = nodelist[1]
+        while len(nodelist) == 2:
+            nodelist = nodelist[1]
         try:
             op = cls.get_op(nodelist[0])
         except KeyError:
@@ -1338,16 +1362,17 @@ class MarkerEvaluation(object):

     @classmethod
     def evaluate(cls, nodelist):
-        while len(nodelist)==2: nodelist = nodelist[1]
+        while len(nodelist) == 2:
+            nodelist = nodelist[1]
         kind = nodelist[0]
         name = nodelist[1]
-        if kind==token.NAME:
+        if kind == token.NAME:
             try:
                 op = cls.values[name]
             except KeyError:
                 raise SyntaxError("Unknown name %r" % name)
             return op()
-        if kind==token.STRING:
+        if kind == token.STRING:
             s = nodelist[1]
             if not cls._safe_string(s):
                 raise SyntaxError(
@@ -1365,9 +1390,11 @@ class MarkerEvaluation(object):
             '\\' not in cand
         )


 invalid_marker = MarkerEvaluation.is_invalid_marker
 evaluate_marker = MarkerEvaluation.evaluate_marker


 class NullProvider:
     """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

@@ -1439,7 +1466,7 @@ class NullProvider:
         cache[script_filename] = (
             len(script_text), 0, script_text.split('\n'), script_filename
         )
-        script_code = compile(script_text, script_filename,'exec')
+        script_code = compile(script_text, script_filename, 'exec')
         exec(script_code, namespace, namespace)

     def _has(self, path):
@@ -1465,13 +1492,7 @@ class NullProvider:
     def _get(self, path):
         if hasattr(self.loader, 'get_data'):
             return self.loader.get_data(path)
-        raise NotImplementedError(
+        raise NotImplementedError( # noqa: F821
-            "Can't perform this operation for loaders without 'get_data()'"
-        )
-register_loader_type(object, NullProvider)

 class EggProvider(NullProvider):
     """Provider based on a virtual filesystem"""

@@ -1484,7 +1505,7 @@ class EggProvider(NullProvider):
         # of multiple eggs; that's why we use module_path instead of .archive
         path = self.module_path
         old = None
-        while path!=old:
+        while path != old:
             if path.lower().endswith('.egg'):
                 self.egg_name = os.path.basename(path)
                 self.egg_info = os.path.join(path, 'EGG-INFO')
@@ -1493,6 +1514,7 @@ class EggProvider(NullProvider):
             old = path
             path, base = os.path.split(path)


 class DefaultProvider(EggProvider):
     """Provides access to package resources in the filesystem"""

@@ -1512,6 +1534,7 @@ class DefaultProvider(EggProvider):
         with open(path, 'rb') as stream:
             return stream.read()


 register_loader_type(type(None), DefaultProvider)

 if importlib_bootstrap is not None:
@@ -1522,13 +1545,14 @@ class EmptyProvider(NullProvider):
     """Provider that returns nothing for all requests"""

     _isdir = _has = lambda self, path: False
-    _get = lambda self, path: ''
+    _get = lambda self, path: '' # noqqa: E731
-    _listdir = lambda self, path: []
+    _listdir = lambda self, path: [] # noqq: E731
     module_path = None

     def __init__(self):
         pass


 empty_provider = EmptyProvider()

@@ -1694,7 +1718,7 @@ class ZipProvider(EggProvider):
                 # so proceed.
                 return real_path
             # Windows, del old file and retry
-            elif os.name=='nt':
+            elif os.name == 'nt':
                 unlink(real_path)
                 rename(tmpnam, real_path)
                 return real_path
@@ -1714,7 +1738,7 @@ class ZipProvider(EggProvider):
         if not os.path.isfile(file_path):
             return False
         stat = os.stat(file_path)
-        if stat.st_size!=size or stat.st_mtime!=timestamp:
+        if stat.st_size != size or stat.st_mtime != timestamp:
             return False
         # check that the contents match
         zip_contents = self.loader.get_data(zip_path)
@@ -1764,6 +1788,7 @@ class ZipProvider(EggProvider):
     def _resource_to_zip(self, resource_name):
         return self._zipinfo_name(self._fn(self.module_path, resource_name))


 register_loader_type(zipimport.zipimporter, ZipProvider)

@@ -1783,11 +1808,11 @@ class FileMetadata(EmptyProvider):
         self.path = path

     def has_metadata(self, name):
-        return name=='PKG-INFO'
+        return name == 'PKG-INFO'

     def get_metadata(self, name):
-        if name=='PKG-INFO':
+        if name == 'PKG-INFO':
-            with open(self.path,'rU') as f:
+            with open(self.path, 'rU') as f:
                 metadata = f.read()
             return metadata
         raise KeyError("No metadata except PKG-INFO is available")
@@ -1835,7 +1860,9 @@ class EggMetadata(ZipProvider):
         self.module_path = importer.archive
         self._setup_prefix()

-_declare_state('dict', _distribution_finders = {})
+_declare_state('dict', _distribution_finders={})


 def register_finder(importer_type, distribution_finder):
     """Register `distribution_finder` to find distributions in sys.path items

@@ -1844,15 +1871,16 @@ def register_finder(importer_type, distribution_finder):
     handler), and `distribution_finder` is a callable that, passed a path
     item and the importer instance, yields ``Distribution`` instances found on
     that path item. See ``pkg_resources.find_on_path`` for an example."""
-    _distribution_finders[importer_type] = distribution_finder
+    _distribution_finders[importer_type] = distribution_finder # noqa: F821


 def find_distributions(path_item, only=False):
     """Yield distributions accessible via `path_item`"""
     importer = get_importer(path_item)
-    finder = _find_adapter(_distribution_finders, importer)
+    finder = _find_adapter(_distribution_finders, importer) # noqa: F821
     return finder(importer, path_item, only)


 def find_eggs_in_zip(importer, path_item, only=False):
     """
     Find eggs in zip files; possibly multiple nested eggs.
@@ -1873,12 +1901,17 @@ def find_eggs_in_zip(importer, path_item, only=False):
         for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
             yield dist


 register_finder(zipimport.zipimporter, find_eggs_in_zip)


 def find_nothing(importer, path_item, only=False):
     return ()


 register_finder(object, find_nothing)


 def find_on_path(importer, path_item, only=False):
     """Yield distributions accessible on a sys.path directory"""
     path_item = _normalize_cached(path_item)
@@ -1888,7 +1921,7 @@ def find_on_path(importer, path_item, only=False):
             # unpacked egg
             yield Distribution.from_filename(
                 path_item, metadata=PathMetadata(
-                    path_item, os.path.join(path_item,'EGG-INFO')
+                    path_item, os.path.join(path_item, 'EGG-INFO')
                 )
             )
         else:
@@ -1920,8 +1953,11 @@ def find_on_path(importer, path_item, only=False):
                 for item in dists:
                     yield item
                 break


 register_finder(pkgutil.ImpImporter, find_on_path)


 if importlib_bootstrap is not None:
     register_finder(importlib_bootstrap.FileFinder, find_on_path)

@@ -1944,7 +1980,8 @@ def register_namespace_handler(importer_type, namespace_handler):
     equivalent subpath. For an example namespace handler, see
     ``pkg_resources.file_ns_handler``.
     """
-    _namespace_handlers[importer_type] = namespace_handler
+    _namespace_handlers[importer_type] = namespace_handler # noqa: F821


 def _handle_ns(packageName, path_item):
     """Ensure that named package includes a subpath of path_item (if needed)"""

@@ -1960,9 +1997,9 @@ def _handle_ns(packageName, path_item):
         module = sys.modules[packageName] = imp.new_module(packageName)
         module.__path__ = []
         _set_parent_ns(packageName)
-    elif not hasattr(module,'__path__'):
+    elif not hasattr(module, '__path__'):
         raise TypeError("Not a package:", packageName)
-    handler = _find_adapter(_namespace_handlers, importer)
+    handler = _find_adapter(_namespace_handlers, importer) # noqa: F821
     subpath = handler(importer, path_item, packageName, module)
     if subpath is not None:
         path = module.__path__
@@ -1973,19 +2010,20 @@ def _handle_ns(packageName, path_item):
         module.__path__.append(path_item)
     return subpath


 def declare_namespace(packageName):
     """Declare that package 'packageName' is a namespace package"""

     imp.acquire_lock()
     try:
-        if packageName in _namespace_packages:
+        if packageName in _namespace_packages: # noqa: F821
             return

         path, parent = sys.path, None
         if '.' in packageName:
             parent = '.'.join(packageName.split('.')[:-1])
             declare_namespace(parent)
             if parent not in _namespace_packages:
|
if parent not in _namespace_packages: # noqa: F821
|
||||||
__import__(parent)
|
__import__(parent)
|
||||||
try:
|
try:
|
||||||
path = sys.modules[parent].__path__
|
path = sys.modules[parent].__path__
|
||||||
|
@ -1994,8 +2032,8 @@ def declare_namespace(packageName):
|
||||||
|
|
||||||
# Track what packages are namespaces, so when new path items are added,
|
# Track what packages are namespaces, so when new path items are added,
|
||||||
# they can be updated
|
# they can be updated
|
||||||
_namespace_packages.setdefault(parent,[]).append(packageName)
|
_namespace_packages.setdefault(parent, []).append(packageName) # noqa: F821
|
||||||
_namespace_packages.setdefault(packageName,[])
|
_namespace_packages.setdefault(packageName, []) # noqa: F821
|
||||||
|
|
||||||
for path_item in path:
|
for path_item in path:
|
||||||
# Ensure all the parent's path items are reflected in the child,
|
# Ensure all the parent's path items are reflected in the child,
|
||||||
|
@ -2005,29 +2043,32 @@ def declare_namespace(packageName):
|
||||||
finally:
|
finally:
|
||||||
imp.release_lock()
|
imp.release_lock()
|
||||||
|
|
||||||
|
|
||||||
def fixup_namespace_packages(path_item, parent=None):
|
def fixup_namespace_packages(path_item, parent=None):
|
||||||
"""Ensure that previously-declared namespace packages include path_item"""
|
"""Ensure that previously-declared namespace packages include path_item"""
|
||||||
imp.acquire_lock()
|
imp.acquire_lock()
|
||||||
try:
|
try:
|
||||||
for package in _namespace_packages.get(parent,()):
|
for package in _namespace_packages.get(parent, ()): # noqa: F821
|
||||||
subpath = _handle_ns(package, path_item)
|
subpath = _handle_ns(package, path_item)
|
||||||
if subpath:
|
if subpath:
|
||||||
fixup_namespace_packages(subpath, package)
|
fixup_namespace_packages(subpath, package)
|
||||||
finally:
|
finally:
|
||||||
imp.release_lock()
|
imp.release_lock()
|
||||||
|
|
||||||
|
|
||||||
def file_ns_handler(importer, path_item, packageName, module):
|
def file_ns_handler(importer, path_item, packageName, module):
|
||||||
"""Compute an ns-package subpath for a filesystem or zipfile importer"""
|
"""Compute an ns-package subpath for a filesystem or zipfile importer"""
|
||||||
|
|
||||||
subpath = os.path.join(path_item, packageName.split('.')[-1])
|
subpath = os.path.join(path_item, packageName.split('.')[-1])
|
||||||
normalized = _normalize_cached(subpath)
|
normalized = _normalize_cached(subpath)
|
||||||
for item in module.__path__:
|
for item in module.__path__:
|
||||||
if _normalize_cached(item)==normalized:
|
if _normalize_cached(item) == normalized:
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
# Only return the path if it's not already there
|
# Only return the path if it's not already there
|
||||||
return subpath
|
return subpath
|
||||||
|
|
||||||
|
|
||||||
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
|
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
|
||||||
register_namespace_handler(zipimport.zipimporter, file_ns_handler)
|
register_namespace_handler(zipimport.zipimporter, file_ns_handler)
|
||||||
|
|
||||||
|
@ -2038,6 +2079,7 @@ if importlib_bootstrap is not None:
|
||||||
def null_ns_handler(importer, path_item, packageName, module):
|
def null_ns_handler(importer, path_item, packageName, module):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
register_namespace_handler(object, null_ns_handler)
|
register_namespace_handler(object, null_ns_handler)
|
||||||
|
|
||||||
|
|
||||||
|
@ -2045,6 +2087,7 @@ def normalize_path(filename):
|
||||||
"""Normalize a file/dir name for comparison purposes"""
|
"""Normalize a file/dir name for comparison purposes"""
|
||||||
return os.path.normcase(os.path.realpath(filename))
|
return os.path.normcase(os.path.realpath(filename))
|
||||||
|
|
||||||
|
|
||||||
def _normalize_cached(filename, _cache={}):
|
def _normalize_cached(filename, _cache={}):
|
||||||
try:
|
try:
|
||||||
return _cache[filename]
|
return _cache[filename]
|
||||||
|
@ -2052,6 +2095,7 @@ def _normalize_cached(filename, _cache={}):
|
||||||
_cache[filename] = result = normalize_path(filename)
|
_cache[filename] = result = normalize_path(filename)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def _set_parent_ns(packageName):
|
def _set_parent_ns(packageName):
|
||||||
parts = packageName.split('.')
|
parts = packageName.split('.')
|
||||||
name = parts.pop()
|
name = parts.pop()
|
||||||
|
@ -2073,6 +2117,7 @@ def yield_lines(strs):
|
||||||
for s in yield_lines(ss):
|
for s in yield_lines(ss):
|
||||||
yield s
|
yield s
|
||||||
|
|
||||||
|
|
||||||
# whitespace and comment
|
# whitespace and comment
|
||||||
LINE_END = re.compile(r"\s*(#.*)?$").match
|
LINE_END = re.compile(r"\s*(#.*)?$").match
|
||||||
# line continuation
|
# line continuation
|
||||||
|
@ -2093,22 +2138,24 @@ EGG_NAME = re.compile(
|
||||||
).match
|
).match
|
||||||
|
|
||||||
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
|
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
|
||||||
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
|
replace = {'pre': 'c', 'preview': 'c', '-': 'final-', 'rc': 'c', 'dev': '@'}.get
|
||||||
|
|
||||||
|
|
||||||
def _parse_version_parts(s):
|
def _parse_version_parts(s):
|
||||||
for part in component_re.split(s):
|
for part in component_re.split(s):
|
||||||
part = replace(part, part)
|
part = replace(part, part)
|
||||||
if not part or part=='.':
|
if not part or part == '.':
|
||||||
continue
|
continue
|
||||||
if part[:1] in '0123456789':
|
if part[:1] in '0123456789':
|
||||||
# pad for numeric comparison
|
# pad for numeric comparison
|
||||||
yield part.zfill(8)
|
yield part.zfill(8)
|
||||||
else:
|
else:
|
||||||
yield '*'+part
|
yield '*' + part
|
||||||
|
|
||||||
# ensure that alpha/beta/candidate are before final
|
# ensure that alpha/beta/candidate are before final
|
||||||
yield '*final'
|
yield '*final'
|
||||||
|
|
||||||
|
|
||||||
def parse_version(s):
|
def parse_version(s):
|
||||||
"""Convert a version string to a chronologically-sortable key
|
"""Convert a version string to a chronologically-sortable key
|
||||||
|
|
||||||
|
@ -2148,7 +2195,7 @@ def parse_version(s):
|
||||||
while parts and parts[-1] == '*final-':
|
while parts and parts[-1] == '*final-':
|
||||||
parts.pop()
|
parts.pop()
|
||||||
# remove trailing zeros from each series of numeric parts
|
# remove trailing zeros from each series of numeric parts
|
||||||
while parts and parts[-1]=='00000000':
|
while parts and parts[-1] == '00000000':
|
||||||
parts.pop()
|
parts.pop()
|
||||||
parts.append(part)
|
parts.append(part)
|
||||||
return tuple(parts)
|
return tuple(parts)
|
||||||
|
@ -2237,7 +2284,7 @@ class EntryPoint(object):
|
||||||
ep = cls.parse(line, dist)
|
ep = cls.parse(line, dist)
|
||||||
if ep.name in this:
|
if ep.name in this:
|
||||||
raise ValueError("Duplicate entry point", group, ep.name)
|
raise ValueError("Duplicate entry point", group, ep.name)
|
||||||
this[ep.name]=ep
|
this[ep.name] = ep
|
||||||
return this
|
return this
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
|
@ -2286,7 +2333,7 @@ class Distribution(object):
|
||||||
self._provider = metadata or empty_provider
|
self._provider = metadata or empty_provider
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_location(cls, location, basename, metadata=None,**kw):
|
def from_location(cls, location, basename, metadata=None, **kw):
|
||||||
project_name, version, py_version, platform = [None]*4
|
project_name, version, py_version, platform = [None]*4
|
||||||
basename, ext = os.path.splitext(basename)
|
basename, ext = os.path.splitext(basename)
|
||||||
if ext.lower() in _distributionImpl:
|
if ext.lower() in _distributionImpl:
|
||||||
|
@ -2294,7 +2341,7 @@ class Distribution(object):
|
||||||
match = EGG_NAME(basename)
|
match = EGG_NAME(basename)
|
||||||
if match:
|
if match:
|
||||||
project_name, version, py_version, platform = match.group(
|
project_name, version, py_version, platform = match.group(
|
||||||
'name','ver','pyver','plat'
|
'name', 'ver', 'pyver', 'plat'
|
||||||
)
|
)
|
||||||
cls = _distributionImpl[ext.lower()]
|
cls = _distributionImpl[ext.lower()]
|
||||||
return cls(
|
return cls(
|
||||||
|
@ -2364,7 +2411,7 @@ class Distribution(object):
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
for line in self._get_metadata(self.PKG_INFO):
|
for line in self._get_metadata(self.PKG_INFO):
|
||||||
if line.lower().startswith('version:'):
|
if line.lower().startswith('version:'):
|
||||||
self._version = safe_version(line.split(':',1)[1].strip())
|
self._version = safe_version(line.split(':', 1)[1].strip())
|
||||||
return self._version
|
return self._version
|
||||||
else:
|
else:
|
||||||
tmpl = "Missing 'Version:' header and/or %s file"
|
tmpl = "Missing 'Version:' header and/or %s file"
|
||||||
|
@ -2383,11 +2430,11 @@ class Distribution(object):
|
||||||
extra, marker = extra.split(':', 1)
|
extra, marker = extra.split(':', 1)
|
||||||
if invalid_marker(marker):
|
if invalid_marker(marker):
|
||||||
# XXX warn
|
# XXX warn
|
||||||
reqs=[]
|
reqs = []
|
||||||
elif not evaluate_marker(marker):
|
elif not evaluate_marker(marker):
|
||||||
reqs=[]
|
reqs = []
|
||||||
extra = safe_extra(extra) or None
|
extra = safe_extra(extra) or None
|
||||||
dm.setdefault(extra,[]).extend(parse_requirements(reqs))
|
dm.setdefault(extra, []).extend(parse_requirements(reqs))
|
||||||
return dm
|
return dm
|
||||||
|
|
||||||
def requires(self, extras=()):
|
def requires(self, extras=()):
|
||||||
|
@ -2478,14 +2525,14 @@ class Distribution(object):
|
||||||
self._get_metadata('entry_points.txt'), self
|
self._get_metadata('entry_points.txt'), self
|
||||||
)
|
)
|
||||||
if group is not None:
|
if group is not None:
|
||||||
return ep_map.get(group,{})
|
return ep_map.get(group, {})
|
||||||
return ep_map
|
return ep_map
|
||||||
|
|
||||||
def get_entry_info(self, group, name):
|
def get_entry_info(self, group, name):
|
||||||
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
|
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
|
||||||
return self.get_entry_map(group).get(name)
|
return self.get_entry_map(group).get(name)
|
||||||
|
|
||||||
def insert_on(self, path, loc = None):
|
def insert_on(self, path, loc=None):
|
||||||
"""Insert self.location in path before its nearest parent directory"""
|
"""Insert self.location in path before its nearest parent directory"""
|
||||||
|
|
||||||
loc = loc or self.location
|
loc = loc or self.location
|
||||||
|
@ -2494,7 +2541,7 @@ class Distribution(object):
|
||||||
|
|
||||||
nloc = _normalize_cached(loc)
|
nloc = _normalize_cached(loc)
|
||||||
bdir = os.path.dirname(nloc)
|
bdir = os.path.dirname(nloc)
|
||||||
npath= [(p and _normalize_cached(p) or p) for p in path]
|
npath = [(p and _normalize_cached(p) or p) for p in path]
|
||||||
|
|
||||||
for p, item in enumerate(npath):
|
for p, item in enumerate(npath):
|
||||||
if item == nloc:
|
if item == nloc:
|
||||||
|
@ -2534,7 +2581,7 @@ class Distribution(object):
|
||||||
loc = normalize_path(self.location)
|
loc = normalize_path(self.location)
|
||||||
for modname in self._get_metadata('top_level.txt'):
|
for modname in self._get_metadata('top_level.txt'):
|
||||||
if (modname not in sys.modules or modname in nsp
|
if (modname not in sys.modules or modname in nsp
|
||||||
or modname in _namespace_packages):
|
or modname in _namespace_packages): # noqa: F821
|
||||||
continue
|
continue
|
||||||
if modname in ('pkg_resources', 'setuptools', 'site'):
|
if modname in ('pkg_resources', 'setuptools', 'site'):
|
||||||
continue
|
continue
|
||||||
|
@ -2555,7 +2602,7 @@ class Distribution(object):
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def clone(self,**kw):
|
def clone(self, **kw):
|
||||||
"""Copy this distribution, substituting in any changed keyword args"""
|
"""Copy this distribution, substituting in any changed keyword args"""
|
||||||
names = 'project_name version py_version platform location precedence'
|
names = 'project_name version py_version platform location precedence'
|
||||||
for attr in names.split():
|
for attr in names.split():
|
||||||
|
@ -2618,7 +2665,7 @@ class DistInfoDistribution(Distribution):
|
||||||
|
|
||||||
def reqs_for_extra(extra):
|
def reqs_for_extra(extra):
|
||||||
for req in reqs:
|
for req in reqs:
|
||||||
if req.marker_fn(override={'extra':extra}):
|
if req.marker_fn(override={'extra': extra}):
|
||||||
yield req
|
yield req
|
||||||
|
|
||||||
common = frozenset(reqs_for_extra(None))
|
common = frozenset(reqs_for_extra(None))
|
||||||
|
@ -2638,7 +2685,7 @@ _distributionImpl = {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def issue_warning(*args,**kw):
|
def issue_warning(*args, **kw):
|
||||||
level = 1
|
level = 1
|
||||||
g = globals()
|
g = globals()
|
||||||
try:
|
try:
|
||||||
|
@ -2759,7 +2806,7 @@ class Requirement:
|
||||||
item = parse_version(item)
|
item = parse_version(item)
|
||||||
last = None
|
last = None
|
||||||
# -1, 0, 1
|
# -1, 0, 1
|
||||||
compare = lambda a, b: (a > b) - (a < b)
|
compare = lambda a, b: (a > b) - (a < b) # noqa: E731
|
||||||
for parsed, trans, op, ver in self.index:
|
for parsed, trans, op, ver in self.index:
|
||||||
# Indexing: 0, 1, -1
|
# Indexing: 0, 1, -1
|
||||||
action = trans[compare(item, parsed)]
|
action = trans[compare(item, parsed)]
|
||||||
|
@ -2779,7 +2826,8 @@ class Requirement:
|
||||||
def __hash__(self):
|
def __hash__(self):
|
||||||
return self.__hash
|
return self.__hash
|
||||||
|
|
||||||
def __repr__(self): return "Requirement.parse(%r)" % str(self)
|
def __repr__(self):
|
||||||
|
return "Requirement.parse(%r)" % str(self)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def parse(s):
|
def parse(s):
|
||||||
|
@ -2790,6 +2838,7 @@ class Requirement:
|
||||||
raise ValueError("Expected only one requirement", s)
|
raise ValueError("Expected only one requirement", s)
|
||||||
raise ValueError("No requirements found", s)
|
raise ValueError("No requirements found", s)
|
||||||
|
|
||||||
|
|
||||||
state_machine = {
|
state_machine = {
|
||||||
# =><
|
# =><
|
||||||
'<': '--T',
|
'<': '--T',
|
||||||
|
@ -2804,10 +2853,12 @@ state_machine = {
|
||||||
def _get_mro(cls):
|
def _get_mro(cls):
|
||||||
"""Get an mro for a type or classic class"""
|
"""Get an mro for a type or classic class"""
|
||||||
if not isinstance(cls, type):
|
if not isinstance(cls, type):
|
||||||
class cls(cls, object): pass
|
class cls(cls, object):
|
||||||
|
pass
|
||||||
return cls.__mro__[1:]
|
return cls.__mro__[1:]
|
||||||
return cls.__mro__
|
return cls.__mro__
|
||||||
|
|
||||||
|
|
||||||
def _find_adapter(registry, ob):
|
def _find_adapter(registry, ob):
|
||||||
"""Return an adapter factory for `ob` from `registry`"""
|
"""Return an adapter factory for `ob` from `registry`"""
|
||||||
for t in _get_mro(getattr(ob, '__class__', type(ob))):
|
for t in _get_mro(getattr(ob, '__class__', type(ob))):
|
||||||
|
@ -2857,12 +2908,13 @@ def split_sections(s):
|
||||||
# wrap up last segment
|
# wrap up last segment
|
||||||
yield section, content
|
yield section, content
|
||||||
|
|
||||||
def _mkstemp(*args,**kw):
|
|
||||||
|
def _mkstemp(*args, **kw):
|
||||||
old_open = os.open
|
old_open = os.open
|
||||||
try:
|
try:
|
||||||
# temporarily bypass sandboxing
|
# temporarily bypass sandboxing
|
||||||
os.open = os_open
|
os.open = os_open
|
||||||
return tempfile.mkstemp(*args,**kw)
|
return tempfile.mkstemp(*args, **kw)
|
||||||
finally:
|
finally:
|
||||||
# and then put it back
|
# and then put it back
|
||||||
os.open = old_open
|
os.open = old_open
|
||||||
|
@ -2870,10 +2922,14 @@ def _mkstemp(*args,**kw):
|
||||||
|
|
||||||
# Set up global resource manager (deliberately not state-saved)
|
# Set up global resource manager (deliberately not state-saved)
|
||||||
_manager = ResourceManager()
|
_manager = ResourceManager()
|
||||||
|
|
||||||
|
|
||||||
def _initialize(g):
|
def _initialize(g):
|
||||||
for name in dir(_manager):
|
for name in dir(_manager):
|
||||||
if not name.startswith('_'):
|
if not name.startswith('_'):
|
||||||
g[name] = getattr(_manager, name)
|
g[name] = getattr(_manager, name)
|
||||||
|
|
||||||
|
|
||||||
_initialize(globals())
|
_initialize(globals())
|
||||||
|
|
||||||
# Prepare the master working set and make the ``require()`` API available
|
# Prepare the master working set and make the ``require()`` API available
|
||||||
|
@ -2890,6 +2946,6 @@ run_main = run_script
|
||||||
# all distributions added to the working set in the future (e.g. by
|
# all distributions added to the working set in the future (e.g. by
|
||||||
# calling ``require()``) will get activated as well.
|
# calling ``require()``) will get activated as well.
|
||||||
add_activation_listener(lambda dist: dist.activate())
|
add_activation_listener(lambda dist: dist.activate())
|
||||||
working_set.entries=[]
|
working_set.entries = []
|
||||||
# match order
|
# match order
|
||||||
list(map(working_set.add_entry, sys.path))
|
list(map(working_set.add_entry, sys.path))
|
||||||
|
|
|
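Most of the pkg_resources changes above only add `# noqa: F821` markers. A minimal sketch of why they are needed (the helper below is illustrative, not the vendored code): flake8 cannot see names that are injected into `globals()` at runtime, so it reports F821 ("undefined name") for every later use unless that line is exempted.

```python
# Illustrative only: mimics the _declare_state()/_initialize() pattern that
# pkg_resources uses to create module-level names at runtime.
def _declare_state(name, value):
    globals()[name] = value
    return value


_declare_state('registry', {})


def register(key, value):
    # flake8 would flag 'registry' as F821 (undefined name) because it only
    # comes into existence at runtime; the noqa comment silences that check
    # on this one line without touching the rest of the file.
    registry[key] = value  # noqa: F821


register('answer', 42)
print(registry)  # noqa: F821
```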
@@ -1,5 +1,6 @@
 from toolchain import PythonRecipe
 
+
 class PlyerRecipe(PythonRecipe):
     version = "master"
     url = "https://github.com/kivy/plyer/archive/{version}.zip"
@@ -1,7 +1,7 @@
 '''Recipe for pycrypto on ios
 '''
 from toolchain import CythonRecipe, shprint
-from os.path import join, exists
+from os.path import join
 import sh
 import os
 
@@ -11,8 +11,7 @@ class PycryptoRecipe(CythonRecipe):
     url = "https://ftp.dlitz.net/pub/dlitz/crypto/pycrypto/pycrypto-{version}.tar.gz"
     depends = ["python", "openssl"]
     include_per_arch = True
-    library="libpycrypto.a"
+    library = "libpycrypto.a"
 
-
     def build_arch(self, arch):
         build_env = arch.get_env()
@@ -26,9 +25,7 @@ class PycryptoRecipe(CythonRecipe):
                 "--prefix=/",
                 "--host={}".format(arch),
                 "ac_cv_func_malloc_0_nonnull=yes",
-                "ac_cv_func_realloc_0_nonnull=yes",
-                )
-        hostpython = sh.Command(self.ctx.hostpython)
+                "ac_cv_func_realloc_0_nonnull=yes")
         super(PycryptoRecipe, self).build_arch(arch)
 
     def install(self):
@@ -41,5 +38,5 @@ class PycryptoRecipe(CythonRecipe):
         build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
         shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
 
-recipe = PycryptoRecipe()
 
+recipe = PycryptoRecipe()
@@ -1,5 +1,6 @@
 from toolchain import CythonRecipe
 
+
 class PyobjusRecipe(CythonRecipe):
     version = "master"
     url = "https://github.com/kivy/pyobjus/archive/{version}.zip"
@@ -6,6 +6,7 @@ from os.path import join
 
 logger = logging.getLogger(__name__)
 
+
 class PythonAliasRecipe(Recipe):
     is_alias = True
 
@@ -26,4 +27,5 @@ class PythonAliasRecipe(Recipe):
             self.depends = [python]
             self.recipe_dir = join(ctx.root_dir, "recipes", python)
 
+
 recipe = PythonAliasRecipe()
@@ -4,8 +4,10 @@ import sh
 import os
 import logging
 
+
 logger = logging.getLogger(__name__)
 
+
 class Python2Recipe(Recipe):
     version = "2.7.1"
     url = "https://www.python.org/ftp/python/{version}/Python-{version}.tar.bz2"
@@ -88,6 +90,7 @@ class Python2Recipe(Recipe):
         # architecture can lead to different pyconfig.h, we would need one patch
         # per arch. Instead, express here the line we don't want / we want.
         pyconfig = join(self.build_dir, "pyconfig.h")
+
         def _remove_line(lines, pattern):
             for line in lines[:]:
                 if pattern in line:
@@ -130,7 +133,6 @@ class Python2Recipe(Recipe):
         os.chdir(join(self.ctx.dist_dir, "root", "python2", "lib", "python2.7"))
         sh.find(".", "-iname", "*.pyc", "-exec", "rm", "{}", ";")
         sh.find(".", "-iname", "*.py", "-exec", "rm", "{}", ";")
-        #sh.find(".", "-iname", "test*", "-exec", "rm", "-rf", "{}", ";")
         sh.rm("-rf", "wsgiref", "bsddb", "curses", "idlelib", "hotshot")
         sh.rm("-rf", sh.glob("lib*"))
 
@@ -3,8 +3,10 @@ Stub functions for _scproxy on iOS
 No proxy is supported yet.
 '''
 
+
 def _get_proxy_settings():
     return {'exclude_simple': 1}
 
+
 def _get_proxies():
     return {}
@@ -1,2 +1,2 @@
 __version__ = 'kivy-ios'
-from ._sqlite3 import *
+from ._sqlite3 import *  # noqa: F401, F403
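The one-line wrapper above keeps its star import but silences two codes at once. A small, self-contained illustration of the same pattern, using a stdlib module rather than the `_sqlite3` extension:

```python
# F403: 'from ... import *' hides which names are defined; F401: re-exported
# names look unused to flake8. Both are suppressed on the re-export line only.
from math import *  # noqa: F401, F403

print(sqrt(2.0))  # sqrt is available via the star import above
```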
@@ -3,6 +3,7 @@ import os
 import sh
 from toolchain import PythonRecipe, shprint
 
+
 class PyYamlRecipe(PythonRecipe):
     version = "3.11"
     url = "https://pypi.python.org/packages/source/P/PyYAML/PyYAML-{version}.tar.gz"
@@ -18,4 +19,5 @@ class PyYamlRecipe(PythonRecipe):
         build_env['PYTHONPATH'] = os.path.join(dest_dir, 'lib', 'python2.7', 'site-packages')
         shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
 
+
 recipe = PyYamlRecipe()
@@ -24,4 +24,5 @@ class LibSDL2MixerRecipe(Recipe):
                 "-target", "libSDL_mixer-iOS",
                 "-configuration", "Release")
 
+
 recipe = LibSDL2MixerRecipe()
@@ -30,4 +30,5 @@ class LibSDL2TTFRecipe(Recipe):
             join(self.get_build_dir(arch.arch), "SDL_ttf.h"),
             join(self.ctx.include_dir, "common", "SDL2"))
 
+
 recipe = LibSDL2TTFRecipe()
@@ -1,12 +1,14 @@
 # pure-python package, this can be removed when we'll support any python package
 from toolchain import PythonRecipe, shprint
 from os.path import join
-import sh, os
+import sh
+import os
 
+
 class WerkzeugRecipe(PythonRecipe):
     version = "master"
     url = "https://github.com/mitsuhiko/werkzeug/archive/{version}.zip"
-    depends = ["python","openssl"]
+    depends = ["python", "openssl"]
 
     def install(self):
         arch = list(self.filtered_archs)[0]
@@ -18,5 +20,5 @@ class WerkzeugRecipe(PythonRecipe):
         build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
         shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
 
-recipe = WerkzeugRecipe()
 
+recipe = WerkzeugRecipe()
@@ -5,12 +5,13 @@ import sh
 import fnmatch
 from distutils.dir_util import copy_tree
 
+
 class ZbarLightRecipe(Recipe):
     version = '1.2'
     url = 'https://github.com/Polyconseil/zbarlight/archive/{version}.tar.gz'
     library = "zbarlight.a"
-    depends = ['hostpython2','python2', 'libzbar']
-    pbx_libraries = ["libz","libbz2",'libc++','libsqlite3','CoreMotion']
+    depends = ['hostpython2', 'python2', 'libzbar']
+    pbx_libraries = ["libz", "libbz2", 'libc++', 'libsqlite3', 'CoreMotion']
     include_per_arch = True
 
     def get_zbar_env(self, arch):
@@ -25,7 +26,7 @@ class ZbarLightRecipe(Recipe):
         build_env["LIBRARY_PATH"] = join(arch.sysroot, "usr", "lib")
         build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
         build_env["CFLAGS"] = " ".join([
-            " -I{}".format(join(self.ctx.dist_dir, "include", arch.arch, "libzbar",'zbar')) +
+            " -I{}".format(join(self.ctx.dist_dir, "include", arch.arch, "libzbar", 'zbar')) +
             " -arch {}".format(arch.arch)
         ])
         build_env['LDFLAGS'] += " -lios -lpython -lzbar"
@@ -34,25 +35,26 @@ class ZbarLightRecipe(Recipe):
     def build_arch(self, arch):
         build_env = self.get_zbar_env(arch)
         hostpython = sh.Command(self.ctx.hostpython)
-        shprint(hostpython, "setup.py", "build",
+        shprint(hostpython, "setup.py", "build",  # noqa: F821
                 _env=build_env)
-        self.apply_patch("zbarlight_1_2.patch")#Issue getting the version, hard coding for now
+        self.apply_patch("zbarlight_1_2.patch")  # Issue getting the version, hard coding for now
         self.biglink()
 
     def install(self):
         arch = list(self.filtered_archs)[0]
-        build_dir = join(self.get_build_dir(arch.arch),'build','lib.macosx-10.13-x86_64-2.7','zbarlight')
-        dist_dir = join(self.ctx.dist_dir,'root','python2','lib','python2.7','site-packages','zbarlight')
-        #Patch before Copying
-        #self.apply_patch("zbarlight_1_2.patch")#Issue getting the version, hard coding for now
+        build_dir = join(self.get_build_dir(arch.arch), 'build',
+                         'lib.macosx-10.13-x86_64-2.7', 'zbarlight')
+        dist_dir = join(self.ctx.dist_dir, 'root', 'python2', 'lib',
+                        'python2.7', 'site-packages', 'zbarlight')
+        # Patch before Copying
+        # self.apply_patch("zbarlight_1_2.patch")#Issue getting the version, hard coding for now
         copy_tree(build_dir, dist_dir)
-        os.remove(join(dist_dir,'_zbarlight.c'))
+        os.remove(join(dist_dir, '_zbarlight.c'))
 
-
     def _patch__init__(self):
-        init = join(self.ctx.dist_dir,'root','python2','lib','python2.7',
-                    'site-packages','zbarlight', "__init__.py")
-        shprint(
+        init = join(self.ctx.dist_dir, 'root', 'python2', 'lib', 'python2.7',
+                    'site-packages', 'zbarlight', "__init__.py")
+        shprint(  # noqa: F821
             sh.sed, "-i.bak",
             "s/__version__ = pkg_resources.get_distribution('zbarlight').version'"
             "/__version__ = '{version}'/g",
@@ -64,6 +66,7 @@ class ZbarLightRecipe(Recipe):
             if fnmatch.filter(filenames, "*.so.libs"):
                 dirs.append(root)
         cmd = sh.Command(join(self.ctx.root_dir, "tools", "biglink"))
-        shprint(cmd, join(self.build_dir, "zbarlight.a"), *dirs)
+        shprint(cmd, join(self.build_dir, "zbarlight.a"), *dirs)  # noqa: F821
 
+
 recipe = ZbarLightRecipe()
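The zbarlight recipe mostly gains spaces after commas and wraps its long `join()` calls. A short sketch of that wrapping style, with placeholder path segments rather than the recipe's real ones:

```python
from os.path import join

# Continuation lines keep each physical line under flake8's default
# 79-character limit (E501); the resulting path is unchanged.
dist_dir = join('dist', 'root', 'python2', 'lib',
                'python2.7', 'site-packages', 'zbarlight')
print(dist_dir)
```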
@@ -9,7 +9,7 @@ def test_kivy():
     import kivy
     import kivy.event
     import kivy.core.window
-    import kivy.uix.widget
+    import kivy.uix.widget  # noqa: F401
 
 
 def test_audiostream():
@@ -39,7 +39,7 @@ def test_numpy():
 
 
 def test_curly():
-    import curly
+    import curly  # noqa: F401
 
 
 def run_test(f, name):
@@ -1,7 +1,6 @@
 print("Python 3 running!")
 import sys
 print(f"sys.path: {sys.path}")
-import os
 import traceback
 
 modules_to_tests = [
@@ -21,17 +20,18 @@ for name in modules_to_tests:
 
 # test pyobjus
 print("- import pyobjus start")
-import pyobjus
+import pyobjus  # noqa: F401
 print("- import done")
 from pyobjus import autoclass
 NSNotificationCenter = autoclass("NSNotificationCenter")
 
 # test ios
-import ios
+import ios  # noqa: F401
 
 from kivy.app import App
 from kivy.lang import Builder
 
+
 class TestApp(App):
     def build(self):
         return Builder.load_string("""
@@ -51,4 +51,5 @@ RelativeLayout:
 
 """)
 
+
 TestApp().run()
@@ -7,6 +7,7 @@ import subprocess
 # resolve cython executable
 cython = None
 
+
 def resolve_cython():
     global cython
     for executable in ('cython', 'cython-2.7'):
@@ -17,6 +18,7 @@ def resolve_cython():
                 cython = os.path.join(path, executable)
                 return
 
+
 def do(fn):
     print('cythonize:', fn)
     assert(fn.endswith('.pyx'))
@@ -48,6 +50,7 @@ def do(fn):
     with open(fn_c, 'w') as fd:
         fd.write(data)
 
+
 if __name__ == '__main__':
     print('-- cythonize', sys.argv)
     resolve_cython()

tox.ini (7 changed lines)
@@ -13,6 +13,13 @@ deps = flake8
 commands = flake8 recipes/ tools/ tests/ .ci/ toolchain.py
 
 [flake8]
+exclude = tools/external/,
+    toolchain.py, # Temporary removal: TODO: ZenCODE
+    recipes/kivy, # Temporary removal: TODO: ZenCODE
+    recipes/python3, # Temporary removal: TODO: ZenCODE
+    recipes/hostpython3, # Temporary removal: TODO: ZenCODE
+    recipes/host_setuptools3, # Temporary removal: TODO: ZenCODE
+
 ignore =
     E123, # Closing bracket does not match indentation of opening bracket's line
     E124, # Closing bracket does not match visual indentation
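The `[flake8]` section above excludes the paths that are not PEP 8 clean yet and ignores two closing-bracket indentation codes. A hedged sketch of reproducing the CI check locally; it assumes flake8 is installed and simply mirrors the paths from the `commands` line, nothing more:

```python
import subprocess

# Same paths as the tox "commands" entry; flake8 reads the [flake8] section
# from tox.ini automatically, so the exclude/ignore settings apply here too.
subprocess.run(
    ["flake8", "recipes/", "tools/", "tests/", ".ci/", "toolchain.py"],
    check=True,
)
```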