Flake8 CI fixes (#451)
* Pep8 fixes
* tox Pep8 compliance
* Excluded external tools folder from flake8 tests
* Added flake8 exclusions
* Pep8 fixes
* Pep8 fixes
* Corrected type
* Pep8 fixes
* Pep 8 compliance
* Pep8 fixes
* Pep8 fixes
* Pep8 fixes
* Pep8 fixes
* Pep 8 fixes
* Pep 8 fixes
* Pep8 fixes
* Pep8 fixes
* Pep8 fixes
* Pep8
* Pep8
* Pep 8
* Pep 8
* Pep8 fixes
* Pep8
* Pep8
* Pep8
* Pep8 fixes
* Pep8 fixes
* Pep8 fixes
* Pep8 fixes
* Pep8 fixes
* Revert changes
* Revert changes to kivy/__init.py
* Revert changes
* Revert changes
* Revert changes
* Revert changes to toolchain
* Add file exclusions to tox.ini
* Added exclusions for alias recipes
* Remove dead code
* Added py extension to recipes
* Removed recipe build skip
* Improves recipe matching

The previous expression matched all three of the following lines of a `git diff --name-only` output:
```
recipes/hostlibffi/__init__.py
recipes/hostpython.py
recipes/hostpython2/__init__.py
```
This resulted in a bug: when later splitting with `recipe = file_path.split('/')[1]`, the `recipes/hostpython.py` string would return including the `\n` newline char, see:
```
>>> 'recipes/hostpython.py\n'.split('/')[1]
'hostpython.py\n'
>>> 'recipes/hostlibffi/__init__.py\n'.split('/')[1]
'hostlibffi'
>>>
```

Co-authored-by: Andre Miras <AndreMiras@users.noreply.github.com>
This commit is contained in:

parent 85f849e187
commit 64bd692632

46 changed files with 328 additions and 241 deletions
```
@@ -1,6 +1,7 @@
import sh
import shutil
import subprocess
from fnmatch import fnmatch


def modified_recipes(branch='origin/master'):
    """
@@ -14,11 +15,12 @@ def modified_recipes(branch='origin/master'):
    git_diff = sh.contrib.git.diff('--name-only', branch)
    recipes = set()
    for file_path in git_diff:
        if 'recipes/' in file_path:
            if fnmatch(file_path, "recipes/*/__init__.py\n"):
                recipe = file_path.split('/')[1]
                recipes.add(recipe)
    return recipes


if __name__ == "__main__":
    updated_recipes = " ".join(modified_recipes())
    if updated_recipes != '':
```
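The hunks above change the helper that turns `git diff --name-only` output into a set of modified recipe names. Below is a minimal, self-contained sketch of just that matching step; the sample `git` output is made up for illustration, and only the `fnmatch` pattern and the `split('/')` step are taken from the change itself.

```python
from fnmatch import fnmatch

# Hypothetical `git diff --name-only` output, one path per line
# (trailing '\n' kept, as when iterating over the command output).
sample_diff_lines = [
    "recipes/hostlibffi/__init__.py\n",
    "recipes/hostpython.py\n",        # no per-recipe folder: must be skipped
    "recipes/hostpython2/__init__.py\n",
]

recipes = set()
for file_path in sample_diff_lines:
    if 'recipes/' in file_path:
        # Only real recipe packages match "recipes/*/__init__.py\n", so the
        # split below can never pick up a file name with a trailing newline.
        if fnmatch(file_path, "recipes/*/__init__.py\n"):
            recipes.add(file_path.split('/')[1])

print(sorted(recipes))
```

Run as-is, the sketch prints `['hostlibffi', 'hostpython2']`; the bare `recipes/hostpython.py` path no longer leaks an entry ending in `'\n'` into the set.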
|
|
@ -10,4 +10,3 @@ class AudiostreamRecipe(CythonRecipe):
|
|||
|
||||
|
||||
recipe = AudiostreamRecipe()
|
||||
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
# pure-python package, this can be removed when we'll support any python package
|
||||
from toolchain import PythonRecipe, shprint
|
||||
from os.path import join
|
||||
import sh, os
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
||||
class ClickRecipe(PythonRecipe):
|
||||
version = "master"
|
||||
|
@ -20,5 +22,5 @@ class ClickRecipe(PythonRecipe):
|
|||
shprint(cmd, "-i", "", "s/setuptools/distutils.core/g", "./setup.py", _env=build_env)
|
||||
shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
|
||||
|
||||
recipe = ClickRecipe()
|
||||
|
||||
recipe = ClickRecipe()
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
from toolchain import CythonRecipe
|
||||
|
||||
|
||||
class CurlyRecipe(CythonRecipe):
|
||||
version = "master"
|
||||
url = "https://github.com/tito/curly/archive/{version}.zip"
|
||||
|
|
|
@ -3,10 +3,7 @@ Author: Lawrence Du, Lukasz Mach
|
|||
E-mail: larrydu88@gmail.com, maho@pagema.net
|
||||
"""
|
||||
|
||||
from toolchain import CythonRecipe,shprint
|
||||
import os
|
||||
from os.path import join
|
||||
import sh
|
||||
from toolchain import CythonRecipe
|
||||
|
||||
|
||||
class CymunkRecipe(CythonRecipe):
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
from toolchain import PythonRecipe, shprint
|
||||
from os.path import join
|
||||
import sh, os
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
||||
class DistributeRecipe(PythonRecipe):
|
||||
version = "0.7.3"
|
||||
|
@ -18,4 +20,5 @@ class DistributeRecipe(PythonRecipe):
|
|||
build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
|
||||
shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
|
||||
|
||||
|
||||
recipe = DistributeRecipe()
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
from toolchain import Recipe, shprint
|
||||
from os.path import join, exists
|
||||
from os.path import join
|
||||
import sh
|
||||
|
||||
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
from toolchain import CythonRecipe, shprint
|
||||
from toolchain import CythonRecipe
|
||||
from os.path import join
|
||||
import sh
|
||||
|
||||
|
||||
class FFPyplayerRecipe(CythonRecipe):
|
||||
|
@ -34,4 +33,3 @@ class FFPyplayerRecipe(CythonRecipe):
|
|||
|
||||
|
||||
recipe = FFPyplayerRecipe()
|
||||
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
# pure-python package, this can be removed when we'll support any python package
|
||||
from toolchain import PythonRecipe, shprint
|
||||
from os.path import join
|
||||
import sh, os
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
||||
class FlaskRecipe(PythonRecipe):
|
||||
version = "master"
|
||||
url = "https://github.com/mitsuhiko/flask/archive/{version}.zip"
|
||||
depends = ["python","jinja2","werkzeug","itsdangerous","click"]
|
||||
depends = ["python", "jinja2", "werkzeug", "itsdangerous", "click"]
|
||||
|
||||
def install(self):
|
||||
arch = list(self.filtered_archs)[0]
|
||||
|
@ -20,5 +22,5 @@ class FlaskRecipe(PythonRecipe):
|
|||
shprint(cmd, "-i", "", "s/setuptools/distutils.core/g", "./setup.py", _env=build_env)
|
||||
shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
|
||||
|
||||
recipe = FlaskRecipe()
|
||||
|
||||
recipe = FlaskRecipe()
|
||||
|
|
|
@ -32,4 +32,3 @@ class FreetypeRecipe(Recipe):
|
|||
|
||||
|
||||
recipe = FreetypeRecipe()
|
||||
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
from toolchain import Recipe, shprint
|
||||
from os.path import join, exists
|
||||
from os.path import join
|
||||
import sh
|
||||
import os
|
||||
import fnmatch
|
||||
import shutil
|
||||
|
||||
|
||||
|
@ -31,4 +30,5 @@ class HostSetuptools(Recipe):
|
|||
os.remove('easy-install.pth')
|
||||
shutil.rmtree('EGG-INFO')
|
||||
|
||||
|
||||
recipe = HostSetuptools()
|
||||
|
|
|
@ -5,6 +5,7 @@ import logging
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LibffiRecipe(Recipe):
|
||||
version = "3.2.1"
|
||||
url = "ftp://sourceware.org/pub/libffi/libffi-{version}.tar.gz"
|
||||
|
@ -29,7 +30,7 @@ class LibffiRecipe(Recipe):
|
|||
return
|
||||
# necessary as it doesn't compile with XCode 6.0. If we use 5.1.1, the
|
||||
# compiler for i386 is not working.
|
||||
#shprint(sh.sed,
|
||||
# shprint(sh.sed,
|
||||
# "-i.bak",
|
||||
# "s/-miphoneos-version-min=5.1.1/-miphoneos-version-min=6.0/g",
|
||||
# "generate-darwin-source-and-headers.py")
|
||||
|
@ -63,4 +64,5 @@ class LibffiRecipe(Recipe):
|
|||
def postbuild_arch(self, arch):
|
||||
pass
|
||||
|
||||
|
||||
recipe = LibffiRecipe()
|
||||
|
|
|
@ -5,6 +5,7 @@ import logging
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HostpythonAliasRecipe(Recipe):
|
||||
is_alias = True
|
||||
|
||||
|
@ -24,4 +25,5 @@ class HostpythonAliasRecipe(Recipe):
|
|||
if hostpython:
|
||||
self.depends = [hostpython]
|
||||
|
||||
|
||||
recipe = HostpythonAliasRecipe()
|
||||
|
|
|
@ -3,8 +3,10 @@ Stub functions for _scproxy on OsX
|
|||
No proxy is supported yet.
|
||||
'''
|
||||
|
||||
|
||||
def _get_proxy_settings():
|
||||
return {'exclude_simple': 1}
|
||||
|
||||
|
||||
def _get_proxies():
|
||||
return {}
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
from distutils.core import setup, Extension
|
||||
import os
|
||||
|
||||
|
||||
setup(name='ios',
|
||||
version='1.1',
|
||||
ext_modules=[
|
||||
Extension(
|
||||
'ios', ['ios.c', 'ios_utils.m', 'ios_mail.m', 'ios_browser.m',
|
||||
Extension('ios',
|
||||
['ios.c', 'ios_utils.m', 'ios_mail.m', 'ios_browser.m',
|
||||
'ios_filechooser.m'],
|
||||
libraries=[ ],
|
||||
libraries=[],
|
||||
library_dirs=[],
|
||||
)
|
||||
]
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
# pure-python package, this can be removed when we'll support any python package
|
||||
from toolchain import PythonRecipe, shprint
|
||||
from os.path import join
|
||||
import sh, os
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
||||
class ItsDangerousRecipe(PythonRecipe):
|
||||
version = "master"
|
||||
|
@ -20,5 +22,5 @@ class ItsDangerousRecipe(PythonRecipe):
|
|||
shprint(cmd, "-i", "", "s/setuptools/distutils.core/g", "./setup.py", _env=build_env)
|
||||
shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
|
||||
|
||||
recipe = ItsDangerousRecipe()
|
||||
|
||||
recipe = ItsDangerousRecipe()
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
# pure-python package, this can be removed when we'll support any python package
|
||||
from toolchain import PythonRecipe, shprint
|
||||
from os.path import join
|
||||
import sh, os
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
||||
class Jinja2Recipe(PythonRecipe):
|
||||
version = "master"
|
||||
url = "https://github.com/mitsuhiko/jinja2/archive/{version}.zip"
|
||||
depends = ["python","markupsafe"]
|
||||
depends = ["python", "markupsafe"]
|
||||
|
||||
def install(self):
|
||||
arch = list(self.filtered_archs)[0]
|
||||
|
@ -20,5 +22,5 @@ class Jinja2Recipe(PythonRecipe):
|
|||
shprint(cmd, "-i", "", "s/setuptools/distutils.core/g", "./setup.py", _env=build_env)
|
||||
shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
|
||||
|
||||
recipe = Jinja2Recipe()
|
||||
|
||||
recipe = Jinja2Recipe()
|
||||
|
|
|
@ -6,7 +6,7 @@ E-mail: larrydu88@gmail.com
|
|||
from toolchain import CythonRecipe, shprint
|
||||
import sh
|
||||
from os.path import join
|
||||
from os import environ, chdir
|
||||
from os import chdir
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
@ -16,46 +16,41 @@ class KiventCoreRecipe(CythonRecipe):
|
|||
version = 'master'
|
||||
url = 'https://github.com/kivy/kivent/archive/{version}.zip'
|
||||
name = 'kivent_core'
|
||||
depends = ['libffi','kivy'] #note: unsure if libffi is necessary here
|
||||
pre_build_ext=False
|
||||
depends = ['libffi', 'kivy'] # note: unsure if libffi is necessary here
|
||||
pre_build_ext = False
|
||||
subbuilddir = False
|
||||
cythonize = True
|
||||
pbx_frameworks = ["OpenGLES"] #note: This line may be unnecessary
|
||||
|
||||
pbx_frameworks = ["OpenGLES"] # note: This line may be unnecessary
|
||||
|
||||
def get_recipe_env(self, arch):
|
||||
env = super(KiventCoreRecipe,self).get_recipe_env(arch)
|
||||
env = super(KiventCoreRecipe, self).get_recipe_env(arch)
|
||||
env['CYTHONPATH'] = self.get_recipe(
|
||||
'kivy', self.ctx).get_build_dir(arch.arch)
|
||||
return env
|
||||
|
||||
|
||||
def get_build_dir(self,arch, sub=False):
|
||||
def get_build_dir(self, arch, sub=False):
|
||||
"""
|
||||
Call this to get the correct build_dir, where setup.py is located which is
|
||||
actually under modules/core/setup.py
|
||||
"""
|
||||
builddir = super(KiventCoreRecipe, self).get_build_dir(str(arch))
|
||||
if sub or self.subbuilddir:
|
||||
core_build_dir = join (builddir, 'modules', 'core')
|
||||
core_build_dir = join(builddir, 'modules', 'core')
|
||||
logger.info("Core build directory is located at {}".format(core_build_dir))
|
||||
return core_build_dir
|
||||
else:
|
||||
logger.info("Building in {}".format(builddir))
|
||||
return builddir
|
||||
|
||||
|
||||
def build_arch(self, arch):
|
||||
"""
|
||||
Override build.arch to avoid calling setup.py here (Call it in
|
||||
install() instead).
|
||||
"""
|
||||
|
||||
self.subbuildir = True
|
||||
self.cythonize_build()
|
||||
self.biglink()
|
||||
self.subbuilddir=False
|
||||
|
||||
self.subbuilddir = False
|
||||
|
||||
def install(self):
|
||||
"""
|
||||
|
@ -73,35 +68,35 @@ class KiventCoreRecipe(CythonRecipe):
|
|||
"""
|
||||
arch = list(self.filtered_archs)[0]
|
||||
|
||||
build_dir = self.get_build_dir(arch.arch,sub=True)
|
||||
logger.info("Building kivent_core {} in {}".format(arch.arch,build_dir))
|
||||
build_dir = self.get_build_dir(arch.arch, sub=True)
|
||||
logger.info("Building kivent_core {} in {}".format(arch.arch, build_dir))
|
||||
chdir(build_dir)
|
||||
hostpython = sh.Command(self.ctx.hostpython)
|
||||
|
||||
#Get the appropriate environment for this recipe (including CYTHONPATH)
|
||||
#build_env = arch.get_env()
|
||||
# Get the appropriate environment for this recipe (including CYTHONPATH)
|
||||
# build_env = arch.get_env()
|
||||
build_env = self.get_recipe_env(arch)
|
||||
|
||||
dest_dir = join (self.ctx.dist_dir, "root", "python")
|
||||
dest_dir = join(self.ctx.dist_dir, "root", "python")
|
||||
build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
|
||||
|
||||
#Add Architecture specific kivy path for 'import kivy' to PYTHONPATH
|
||||
# Add Architecture specific kivy path for 'import kivy' to PYTHONPATH
|
||||
arch_kivy_path = self.get_recipe('kivy', self.ctx).get_build_dir(arch.arch)
|
||||
build_env['PYTHONPATH'] = join( build_env['PYTHONPATH'],':',arch_kivy_path)
|
||||
build_env['PYTHONPATH'] = join(build_env['PYTHONPATH'], ':', arch_kivy_path)
|
||||
|
||||
#Make sure you call kivent_core/modules/core/setup.py
|
||||
subdir_path = self.get_build_dir(str(arch),sub=True)
|
||||
setup_path = join(subdir_path,"setup.py")
|
||||
# Make sure you call kivent_core/modules/core/setup.py
|
||||
subdir_path = self.get_build_dir(str(arch), sub=True)
|
||||
setup_path = join(subdir_path, "setup.py")
|
||||
|
||||
|
||||
#Print out directories for sanity check
|
||||
# Print out directories for sanity check
|
||||
logger.info("ENVS", build_env)
|
||||
logger.info("ROOT",self.ctx.root_dir)
|
||||
logger.info("BUILD",self.ctx.build_dir)
|
||||
logger.info("ROOT", self.ctx.root_dir)
|
||||
logger.info("BUILD", self.ctx.build_dir)
|
||||
logger.info("INCLUDE", self.ctx.include_dir)
|
||||
logger.info("DISTDIR", self.ctx.dist_dir)
|
||||
logger.info("ARCH KIVY LOC",self.get_recipe('kivy', self.ctx).get_build_dir(arch.arch))
|
||||
logger.info("ARCH KIVY LOC", self.get_recipe('kivy', self.ctx).get_build_dir(arch.arch))
|
||||
|
||||
shprint(hostpython, setup_path, "build_ext", "install", _env=build_env)
|
||||
|
||||
|
||||
recipe = KiventCoreRecipe()
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
from toolchain import Recipe, shprint
|
||||
from os.path import join, exists
|
||||
from os.path import join
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
||||
class CurlRecipe(Recipe):
|
||||
|
@ -11,7 +10,6 @@ class CurlRecipe(Recipe):
|
|||
include_dir = "include"
|
||||
depends = ["openssl"]
|
||||
|
||||
|
||||
def build_arch(self, arch):
|
||||
build_env = arch.get_env()
|
||||
configure = sh.Command(join(self.build_dir, "configure"))
|
||||
|
@ -27,6 +25,5 @@ class CurlRecipe(Recipe):
|
|||
shprint(sh.make, "clean")
|
||||
shprint(sh.make, self.ctx.concurrent_make)
|
||||
|
||||
|
||||
recipe = CurlRecipe()
|
||||
|
||||
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
from toolchain import Recipe, shprint
|
||||
from os.path import join, exists
|
||||
from os.path import join
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
||||
class JpegRecipe(Recipe):
|
||||
|
@ -16,7 +15,6 @@ class JpegRecipe(Recipe):
|
|||
]
|
||||
include_per_arch = True
|
||||
|
||||
|
||||
def build_arch(self, arch):
|
||||
build_env = arch.get_env()
|
||||
configure = sh.Command(join(self.build_dir, "configure"))
|
||||
|
@ -31,6 +29,5 @@ class JpegRecipe(Recipe):
|
|||
shprint(sh.make, "clean")
|
||||
shprint(sh.make, self.ctx.concurrent_make)
|
||||
|
||||
|
||||
recipe = JpegRecipe()
|
||||
|
||||
|
||||
|
|
|
@ -3,6 +3,7 @@ from toolchain import Recipe, shprint
|
|||
from os.path import join
|
||||
import sh
|
||||
|
||||
|
||||
class PngRecipe(Recipe):
|
||||
version = '1.6.26'
|
||||
url = 'http://downloads.sourceforge.net/sourceforge/libpng/libpng-{version}.tar.gz'
|
||||
|
@ -23,4 +24,5 @@ class PngRecipe(Recipe):
|
|||
shprint(sh.make, "clean")
|
||||
shprint(sh.make, self.ctx.concurrent_make, _env=build_env)
|
||||
|
||||
|
||||
recipe = PngRecipe()
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from toolchain import Recipe,shprint
|
||||
from toolchain import Recipe, shprint
|
||||
from os.path import join
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
||||
class LibZBarRecipe(Recipe):
|
||||
|
||||
|
@ -28,11 +28,12 @@ class LibZBarRecipe(Recipe):
|
|||
super(LibZBarRecipe, self).build_arch(arch)
|
||||
build_env = arch.get_env()
|
||||
build_env["CFLAGS"] = " ".join([
|
||||
"-I{}".format(join(self.ctx.dist_dir, "build","libiconv",arch.arch)) +
|
||||
" -arch {}".format(arch.arch),build_env['CFLAGS']
|
||||
"-I{}".format(join(self.ctx.dist_dir, "build", "libiconv", arch.arch)) +
|
||||
" -arch {}".format(arch.arch), build_env['CFLAGS']
|
||||
])
|
||||
shprint(sh.Command('autoreconf') ,'-vif')
|
||||
shprint(sh.Command('./configure'),
|
||||
shprint(sh.Command('autoreconf'), '-vif')
|
||||
shprint(
|
||||
sh.Command('./configure'),
|
||||
"CC={}".format(build_env["CC"]),
|
||||
"LD={}".format(build_env["LD"]),
|
||||
"CFLAGS={}".format(build_env["CFLAGS"]),
|
||||
|
@ -53,4 +54,5 @@ class LibZBarRecipe(Recipe):
|
|||
shprint(sh.make, 'clean')
|
||||
shprint(sh.make, _env=build_env)
|
||||
|
||||
|
||||
recipe = LibZBarRecipe()
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
# pure-python package, this can be removed when we'll support any python package
|
||||
from toolchain import PythonRecipe, shprint
|
||||
from os.path import join
|
||||
import sh, os
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
||||
class MarkupSafeRecipe(PythonRecipe):
|
||||
version = "master"
|
||||
|
@ -20,9 +22,8 @@ class MarkupSafeRecipe(PythonRecipe):
|
|||
shprint(cmd, "-i", "", "s/,.*Feature//g", "./setup.py", _env=build_env)
|
||||
shprint(cmd, "-i", "", "s/setuptools/distutils.core/g", "./setup.py", _env=build_env)
|
||||
shprint(cmd, "-i", "", "/^speedups = Feature/,/^)$/s/.*//g", "./setup.py", _env=build_env)
|
||||
shprint(cmd, "-i", "", "s/features\['speedups'\].*=.*speedups/pass/g", "./setup.py", _env=build_env)
|
||||
shprint(cmd, "-i", "", "s/features\['speedups'\].*=.*speedups/pass/g", "./setup.py", _env=build_env) # noqa: W605
|
||||
shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
|
||||
|
||||
|
||||
recipe = MarkupSafeRecipe()
|
||||
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
from toolchain import CythonRecipe, shprint
|
||||
from toolchain import CythonRecipe
|
||||
from os.path import join
|
||||
from os import chdir, listdir
|
||||
import sh
|
||||
import shutil
|
||||
|
||||
|
|
|
@ -32,4 +32,5 @@ class OpensslRecipe(Recipe):
|
|||
shprint(sh.make, "clean")
|
||||
shprint(sh.make, self.ctx.concurrent_make, "build_libs")
|
||||
|
||||
|
||||
recipe = OpensslRecipe()
|
||||
|
|
|
@ -11,6 +11,5 @@ class PhotoRecipe(CythonRecipe):
|
|||
def install(self):
|
||||
self.install_python_package(name="photolibrary.so", is_dir=False)
|
||||
|
||||
|
||||
recipe = PhotoRecipe()
|
||||
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@ import fnmatch
|
|||
class PillowRecipe(Recipe):
|
||||
version = "2.8.2"
|
||||
url = "https://pypi.python.org/packages/source/P/Pillow/Pillow-{version}.tar.gz"
|
||||
#url = "https://github.com/python-pillow/Pillow/archive/{version}.tar.gz"
|
||||
# url = "https://github.com/python-pillow/Pillow/archive/{version}.tar.gz"
|
||||
library = "libpil.a"
|
||||
depends = ["hostpython", "host_setuptools", "pkgresources", "freetype", "libjpeg", "python", "ios"]
|
||||
pbx_libraries = ["libz", "libbz2"]
|
||||
|
@ -33,9 +33,9 @@ class PillowRecipe(Recipe):
|
|||
def build_arch(self, arch):
|
||||
self.apply_patch('pil_setup.patch')
|
||||
build_env = self.get_pil_env(arch)
|
||||
#build_dir = self.get_build_dir(arch.arch)
|
||||
# build_dir = self.get_build_dir(arch.arch)
|
||||
hostpython = sh.Command(self.ctx.hostpython)
|
||||
#build_env["PYTHONHOME"] = hostpython
|
||||
# build_env["PYTHONHOME"] = hostpython
|
||||
# first try to generate .h
|
||||
shprint(hostpython, "setup.py", "build_ext", "-g",
|
||||
_env=build_env)
|
||||
|
@ -61,5 +61,5 @@ class PillowRecipe(Recipe):
|
|||
cmd = sh.Command(join(self.ctx.root_dir, "tools", "biglink"))
|
||||
shprint(cmd, join(self.build_dir, "libpil.a"), *dirs)
|
||||
|
||||
recipe = PillowRecipe()
|
||||
|
||||
recipe = PillowRecipe()
|
||||
|
|
|
@ -56,5 +56,5 @@ class PillowRecipe(Recipe):
|
|||
cmd = sh.Command(join(self.ctx.root_dir, "tools", "biglink"))
|
||||
shprint(cmd, join(self.build_dir, "libpillow.a"), *dirs)
|
||||
|
||||
recipe = PillowRecipe()
|
||||
|
||||
recipe = PillowRecipe()
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from toolchain import Recipe, shprint
|
||||
from toolchain import Recipe
|
||||
from os.path import join
|
||||
import sh
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
"""
|
||||
""" # noqa E902
|
||||
Package resource API
|
||||
--------------------
|
||||
|
||||
|
@ -10,7 +10,7 @@ names being passed into the API.
|
|||
|
||||
The package resource API is designed to work with normal filesystem packages,
|
||||
.egg files, and unpacked .egg files. It can also work in a limited way with
|
||||
.zip files and with custom PEP 302 loaders that support the ``get_data()``
|
||||
.zip files and with custom PEP 302 loaders that support the ``get_data()``.
|
||||
method.
|
||||
"""
|
||||
|
||||
|
@ -41,8 +41,7 @@ PY2 = not PY3
|
|||
|
||||
if PY3:
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
if PY2:
|
||||
else:
|
||||
from urlparse import urlparse, urlunparse
|
||||
|
||||
if PY3:
|
||||
|
@ -76,10 +75,12 @@ except ImportError:
|
|||
|
||||
_state_vars = {}
|
||||
|
||||
|
||||
def _declare_state(vartype, **kw):
|
||||
globals().update(kw)
|
||||
_state_vars.update(dict.fromkeys(kw, vartype))
|
||||
|
||||
|
||||
def __getstate__():
|
||||
state = {}
|
||||
g = globals()
|
||||
|
@ -87,25 +88,31 @@ def __getstate__():
|
|||
state[k] = g['_sget_'+v](g[k])
|
||||
return state
|
||||
|
||||
|
||||
def __setstate__(state):
|
||||
g = globals()
|
||||
for k, v in state.items():
|
||||
g['_sset_'+_state_vars[k]](k, g[k], v)
|
||||
return state
|
||||
|
||||
|
||||
def _sget_dict(val):
|
||||
return val.copy()
|
||||
|
||||
|
||||
def _sset_dict(key, ob, state):
|
||||
ob.clear()
|
||||
ob.update(state)
|
||||
|
||||
|
||||
def _sget_object(val):
|
||||
return val.__getstate__()
|
||||
|
||||
|
||||
def _sset_object(key, ob, state):
|
||||
ob.__setstate__(state)
|
||||
|
||||
|
||||
_sget_none = _sset_none = lambda *args: None
|
||||
|
||||
|
||||
|
@ -132,6 +139,7 @@ def get_supported_platform():
|
|||
pass
|
||||
return plat
|
||||
|
||||
|
||||
__all__ = [
|
||||
# Basic resource access and distribution/entry point discovery
|
||||
'require', 'run_script', 'get_provider', 'get_distribution',
|
||||
|
@ -175,19 +183,25 @@ __all__ = [
|
|||
'run_main', 'AvailableDistributions',
|
||||
]
|
||||
|
||||
|
||||
class ResolutionError(Exception):
|
||||
"""Abstract base for dependency resolution errors"""
|
||||
def __repr__(self):
|
||||
return self.__class__.__name__+repr(self.args)
|
||||
|
||||
|
||||
class VersionConflict(ResolutionError):
|
||||
"""An already-installed version conflicts with the requested version"""
|
||||
|
||||
|
||||
class DistributionNotFound(ResolutionError):
|
||||
"""A requested distribution was not found"""
|
||||
|
||||
|
||||
class UnknownExtra(ResolutionError):
|
||||
"""Distribution doesn't have an "extra feature" of the given name"""
|
||||
|
||||
|
||||
_provider_factories = {}
|
||||
|
||||
PY_MAJOR = sys.version[:3]
|
||||
|
@ -197,6 +211,7 @@ SOURCE_DIST = 1
|
|||
CHECKOUT_DIST = 0
|
||||
DEVELOP_DIST = -1
|
||||
|
||||
|
||||
def register_loader_type(loader_type, provider_factory):
|
||||
"""Register `provider_factory` to make providers for `loader_type`
|
||||
|
||||
|
@ -206,6 +221,7 @@ def register_loader_type(loader_type, provider_factory):
|
|||
"""
|
||||
_provider_factories[loader_type] = provider_factory
|
||||
|
||||
|
||||
def get_provider(moduleOrReq):
|
||||
"""Return an IResourceProvider for the named module or requirement"""
|
||||
if isinstance(moduleOrReq, Requirement):
|
||||
|
@ -218,6 +234,7 @@ def get_provider(moduleOrReq):
|
|||
loader = getattr(module, '__loader__', None)
|
||||
return _find_adapter(_provider_factories, loader)(module)
|
||||
|
||||
|
||||
def _macosx_vers(_cache=[]):
|
||||
if not _cache:
|
||||
version = platform.mac_ver()[0]
|
||||
|
@ -233,9 +250,11 @@ def _macosx_vers(_cache=[]):
|
|||
_cache.append(version.split('.'))
|
||||
return _cache[0]
|
||||
|
||||
|
||||
def _macosx_arch(machine):
|
||||
return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
|
||||
|
||||
|
||||
def get_build_platform():
|
||||
"""Return this platform's string for platform-specific distributions
|
||||
|
||||
|
@ -261,6 +280,7 @@ def get_build_platform():
|
|||
pass
|
||||
return plat
|
||||
|
||||
|
||||
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
|
||||
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
|
||||
# XXX backward compat
|
||||
|
@ -274,7 +294,7 @@ def compatible_platforms(provided, required):
|
|||
|
||||
XXX Needs compatibility checks for Linux and other unixy OSes.
|
||||
"""
|
||||
if provided is None or required is None or provided==required:
|
||||
if provided is None or required is None or provided == required:
|
||||
# easy case
|
||||
return True
|
||||
|
||||
|
@ -321,9 +341,11 @@ def run_script(dist_spec, script_name):
|
|||
ns['__name__'] = name
|
||||
require(dist_spec)[0].run_script(script_name, ns)
|
||||
|
||||
|
||||
# backward compatibility
|
||||
run_main = run_script
|
||||
|
||||
|
||||
def get_distribution(dist):
|
||||
"""Return a current distribution object for a Requirement or string"""
|
||||
if isinstance(dist, string_types):
|
||||
|
@ -334,14 +356,17 @@ def get_distribution(dist):
|
|||
raise TypeError("Expected string, Requirement, or Distribution", dist)
|
||||
return dist
|
||||
|
||||
|
||||
def load_entry_point(dist, group, name):
|
||||
"""Return `name` entry point of `group` for `dist` or raise ImportError"""
|
||||
return get_distribution(dist).load_entry_point(group, name)
|
||||
|
||||
|
||||
def get_entry_map(dist, group=None):
|
||||
"""Return the entry point map for `group`, or the full entry map"""
|
||||
return get_distribution(dist).get_entry_map(group)
|
||||
|
||||
|
||||
def get_entry_info(dist, group, name):
|
||||
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
|
||||
return get_distribution(dist).get_entry_info(group, name)
|
||||
|
@ -530,7 +555,7 @@ class WorkingSet(object):
|
|||
|
||||
for key in self.entry_keys[item]:
|
||||
if key not in seen:
|
||||
seen[key]=1
|
||||
seen[key] = 1
|
||||
yield self.by_key[key]
|
||||
|
||||
def add(self, dist, entry=None, insert=True, replace=False):
|
||||
|
@ -550,8 +575,8 @@ class WorkingSet(object):
|
|||
|
||||
if entry is None:
|
||||
entry = dist.location
|
||||
keys = self.entry_keys.setdefault(entry,[])
|
||||
keys2 = self.entry_keys.setdefault(dist.location,[])
|
||||
keys = self.entry_keys.setdefault(entry, [])
|
||||
keys2 = self.entry_keys.setdefault(dist.location, [])
|
||||
if not replace and dist.key in self.by_key:
|
||||
# ignore hidden distros
|
||||
return
|
||||
|
@ -617,10 +642,6 @@ class WorkingSet(object):
|
|||
ws = WorkingSet([])
|
||||
dist = best[req.key] = env.best_match(req, ws, installer)
|
||||
if dist is None:
|
||||
#msg = ("The '%s' distribution was not found on this "
|
||||
# "system, and is required by this application.")
|
||||
#raise DistributionNotFound(msg % req)
|
||||
|
||||
# unfortunately, zc.buildout uses a str(err)
|
||||
# to get the name of the distribution here..
|
||||
raise DistributionNotFound(req)
|
||||
|
@ -805,7 +826,7 @@ class Environment(object):
|
|||
is returned.
|
||||
"""
|
||||
return (self.python is None or dist.py_version is None
|
||||
or dist.py_version==self.python) \
|
||||
or dist.py_version == self.python) \
|
||||
and compatible_platforms(dist.platform, self.platform)
|
||||
|
||||
def remove(self, dist):
|
||||
|
@ -1012,7 +1033,7 @@ variable to point to an accessible directory.
|
|||
target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
|
||||
try:
|
||||
_bypass_ensure_directory(target_path)
|
||||
except:
|
||||
except Exception:
|
||||
self.extraction_error()
|
||||
|
||||
self._warn_unsafe_extraction_path(extract_path)
|
||||
|
@ -1103,6 +1124,7 @@ variable to point to an accessible directory.
|
|||
"""
|
||||
# XXX
|
||||
|
||||
|
||||
def get_default_cache():
|
||||
"""Determine the default cache location
|
||||
|
||||
|
@ -1115,7 +1137,7 @@ def get_default_cache():
|
|||
except KeyError:
|
||||
pass
|
||||
|
||||
if os.name!='nt':
|
||||
if os.name != 'nt':
|
||||
return os.path.expanduser('~/.python-eggs')
|
||||
|
||||
# XXX this may be locale-specific!
|
||||
|
@ -1124,7 +1146,7 @@ def get_default_cache():
|
|||
# best option, should be locale-safe
|
||||
(('APPDATA',), None),
|
||||
(('USERPROFILE',), app_data),
|
||||
(('HOMEDRIVE','HOMEPATH'), app_data),
|
||||
(('HOMEDRIVE', 'HOMEPATH'), app_data),
|
||||
(('HOMEPATH',), app_data),
|
||||
(('HOME',), None),
|
||||
# 95/98/ME
|
||||
|
@ -1147,6 +1169,7 @@ def get_default_cache():
|
|||
"Please set the PYTHON_EGG_CACHE enviroment variable"
|
||||
)
|
||||
|
||||
|
||||
def safe_name(name):
|
||||
"""Convert an arbitrary string to a standard distribution name
|
||||
|
||||
|
@ -1161,7 +1184,7 @@ def safe_version(version):
|
|||
Spaces become dots, and all other non-alphanumeric characters become
|
||||
dashes, with runs of multiple dashes condensed to a single dash.
|
||||
"""
|
||||
version = version.replace(' ','.')
|
||||
version = version.replace(' ', '.')
|
||||
return re.sub('[^A-Za-z0-9.]+', '-', version)
|
||||
|
||||
|
||||
|
@ -1179,7 +1202,7 @@ def to_filename(name):
|
|||
|
||||
Any '-' characters are currently replaced with '_'.
|
||||
"""
|
||||
return name.replace('-','_')
|
||||
return name.replace('-', '_')
|
||||
|
||||
|
||||
class MarkerEvaluation(object):
|
||||
|
@ -1325,11 +1348,12 @@ class MarkerEvaluation(object):
|
|||
if 'parser' not in globals():
|
||||
# Fall back to less-complete _markerlib implementation if 'parser' module
|
||||
# is not available.
|
||||
evaluate_marker = _markerlib_evaluate
|
||||
evaluate_marker = _markerlib_evaluate # noqa: F811
|
||||
|
||||
@classmethod
|
||||
def interpret(cls, nodelist):
|
||||
while len(nodelist)==2: nodelist = nodelist[1]
|
||||
while len(nodelist) == 2:
|
||||
nodelist = nodelist[1]
|
||||
try:
|
||||
op = cls.get_op(nodelist[0])
|
||||
except KeyError:
|
||||
|
@ -1338,16 +1362,17 @@ class MarkerEvaluation(object):
|
|||
|
||||
@classmethod
|
||||
def evaluate(cls, nodelist):
|
||||
while len(nodelist)==2: nodelist = nodelist[1]
|
||||
while len(nodelist) == 2:
|
||||
nodelist = nodelist[1]
|
||||
kind = nodelist[0]
|
||||
name = nodelist[1]
|
||||
if kind==token.NAME:
|
||||
if kind == token.NAME:
|
||||
try:
|
||||
op = cls.values[name]
|
||||
except KeyError:
|
||||
raise SyntaxError("Unknown name %r" % name)
|
||||
return op()
|
||||
if kind==token.STRING:
|
||||
if kind == token.STRING:
|
||||
s = nodelist[1]
|
||||
if not cls._safe_string(s):
|
||||
raise SyntaxError(
|
||||
|
@ -1365,9 +1390,11 @@ class MarkerEvaluation(object):
|
|||
'\\' not in cand
|
||||
)
|
||||
|
||||
|
||||
invalid_marker = MarkerEvaluation.is_invalid_marker
|
||||
evaluate_marker = MarkerEvaluation.evaluate_marker
|
||||
|
||||
|
||||
class NullProvider:
|
||||
"""Try to implement resources and metadata for arbitrary PEP 302 loaders"""
|
||||
|
||||
|
@ -1439,7 +1466,7 @@ class NullProvider:
|
|||
cache[script_filename] = (
|
||||
len(script_text), 0, script_text.split('\n'), script_filename
|
||||
)
|
||||
script_code = compile(script_text, script_filename,'exec')
|
||||
script_code = compile(script_text, script_filename, 'exec')
|
||||
exec(script_code, namespace, namespace)
|
||||
|
||||
def _has(self, path):
|
||||
|
@ -1465,13 +1492,7 @@ class NullProvider:
|
|||
def _get(self, path):
|
||||
if hasattr(self.loader, 'get_data'):
|
||||
return self.loader.get_data(path)
|
||||
raise NotImplementedError(
|
||||
"Can't perform this operation for loaders without 'get_data()'"
|
||||
)
|
||||
|
||||
register_loader_type(object, NullProvider)
|
||||
|
||||
|
||||
raise NotImplementedError( # noqa: F821
|
||||
class EggProvider(NullProvider):
|
||||
"""Provider based on a virtual filesystem"""
|
||||
|
||||
|
@ -1484,7 +1505,7 @@ class EggProvider(NullProvider):
|
|||
# of multiple eggs; that's why we use module_path instead of .archive
|
||||
path = self.module_path
|
||||
old = None
|
||||
while path!=old:
|
||||
while path != old:
|
||||
if path.lower().endswith('.egg'):
|
||||
self.egg_name = os.path.basename(path)
|
||||
self.egg_info = os.path.join(path, 'EGG-INFO')
|
||||
|
@ -1493,6 +1514,7 @@ class EggProvider(NullProvider):
|
|||
old = path
|
||||
path, base = os.path.split(path)
|
||||
|
||||
|
||||
class DefaultProvider(EggProvider):
|
||||
"""Provides access to package resources in the filesystem"""
|
||||
|
||||
|
@ -1512,6 +1534,7 @@ class DefaultProvider(EggProvider):
|
|||
with open(path, 'rb') as stream:
|
||||
return stream.read()
|
||||
|
||||
|
||||
register_loader_type(type(None), DefaultProvider)
|
||||
|
||||
if importlib_bootstrap is not None:
|
||||
|
@ -1522,13 +1545,14 @@ class EmptyProvider(NullProvider):
|
|||
"""Provider that returns nothing for all requests"""
|
||||
|
||||
_isdir = _has = lambda self, path: False
|
||||
_get = lambda self, path: ''
|
||||
_listdir = lambda self, path: []
|
||||
_get = lambda self, path: '' # noqqa: E731
|
||||
_listdir = lambda self, path: [] # noqq: E731
|
||||
module_path = None
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
|
||||
empty_provider = EmptyProvider()
|
||||
|
||||
|
||||
|
@ -1694,7 +1718,7 @@ class ZipProvider(EggProvider):
|
|||
# so proceed.
|
||||
return real_path
|
||||
# Windows, del old file and retry
|
||||
elif os.name=='nt':
|
||||
elif os.name == 'nt':
|
||||
unlink(real_path)
|
||||
rename(tmpnam, real_path)
|
||||
return real_path
|
||||
|
@ -1714,7 +1738,7 @@ class ZipProvider(EggProvider):
|
|||
if not os.path.isfile(file_path):
|
||||
return False
|
||||
stat = os.stat(file_path)
|
||||
if stat.st_size!=size or stat.st_mtime!=timestamp:
|
||||
if stat.st_size != size or stat.st_mtime != timestamp:
|
||||
return False
|
||||
# check that the contents match
|
||||
zip_contents = self.loader.get_data(zip_path)
|
||||
|
@ -1764,6 +1788,7 @@ class ZipProvider(EggProvider):
|
|||
def _resource_to_zip(self, resource_name):
|
||||
return self._zipinfo_name(self._fn(self.module_path, resource_name))
|
||||
|
||||
|
||||
register_loader_type(zipimport.zipimporter, ZipProvider)
|
||||
|
||||
|
||||
|
@ -1783,11 +1808,11 @@ class FileMetadata(EmptyProvider):
|
|||
self.path = path
|
||||
|
||||
def has_metadata(self, name):
|
||||
return name=='PKG-INFO'
|
||||
return name == 'PKG-INFO'
|
||||
|
||||
def get_metadata(self, name):
|
||||
if name=='PKG-INFO':
|
||||
with open(self.path,'rU') as f:
|
||||
if name == 'PKG-INFO':
|
||||
with open(self.path, 'rU') as f:
|
||||
metadata = f.read()
|
||||
return metadata
|
||||
raise KeyError("No metadata except PKG-INFO is available")
|
||||
|
@ -1835,7 +1860,9 @@ class EggMetadata(ZipProvider):
|
|||
self.module_path = importer.archive
|
||||
self._setup_prefix()
|
||||
|
||||
_declare_state('dict', _distribution_finders = {})
|
||||
|
||||
_declare_state('dict', _distribution_finders={})
|
||||
|
||||
|
||||
def register_finder(importer_type, distribution_finder):
|
||||
"""Register `distribution_finder` to find distributions in sys.path items
|
||||
|
@ -1844,15 +1871,16 @@ def register_finder(importer_type, distribution_finder):
|
|||
handler), and `distribution_finder` is a callable that, passed a path
|
||||
item and the importer instance, yields ``Distribution`` instances found on
|
||||
that path item. See ``pkg_resources.find_on_path`` for an example."""
|
||||
_distribution_finders[importer_type] = distribution_finder
|
||||
_distribution_finders[importer_type] = distribution_finder # noqa: F821
|
||||
|
||||
|
||||
def find_distributions(path_item, only=False):
|
||||
"""Yield distributions accessible via `path_item`"""
|
||||
importer = get_importer(path_item)
|
||||
finder = _find_adapter(_distribution_finders, importer)
|
||||
finder = _find_adapter(_distribution_finders, importer) # noqa: F821
|
||||
return finder(importer, path_item, only)
|
||||
|
||||
|
||||
def find_eggs_in_zip(importer, path_item, only=False):
|
||||
"""
|
||||
Find eggs in zip files; possibly multiple nested eggs.
|
||||
|
@ -1873,12 +1901,17 @@ def find_eggs_in_zip(importer, path_item, only=False):
|
|||
for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
|
||||
yield dist
|
||||
|
||||
|
||||
register_finder(zipimport.zipimporter, find_eggs_in_zip)
|
||||
|
||||
|
||||
def find_nothing(importer, path_item, only=False):
|
||||
return ()
|
||||
|
||||
|
||||
register_finder(object, find_nothing)
|
||||
|
||||
|
||||
def find_on_path(importer, path_item, only=False):
|
||||
"""Yield distributions accessible on a sys.path directory"""
|
||||
path_item = _normalize_cached(path_item)
|
||||
|
@ -1888,7 +1921,7 @@ def find_on_path(importer, path_item, only=False):
|
|||
# unpacked egg
|
||||
yield Distribution.from_filename(
|
||||
path_item, metadata=PathMetadata(
|
||||
path_item, os.path.join(path_item,'EGG-INFO')
|
||||
path_item, os.path.join(path_item, 'EGG-INFO')
|
||||
)
|
||||
)
|
||||
else:
|
||||
|
@ -1920,8 +1953,11 @@ def find_on_path(importer, path_item, only=False):
|
|||
for item in dists:
|
||||
yield item
|
||||
break
|
||||
|
||||
|
||||
register_finder(pkgutil.ImpImporter, find_on_path)
|
||||
|
||||
|
||||
if importlib_bootstrap is not None:
|
||||
register_finder(importlib_bootstrap.FileFinder, find_on_path)
|
||||
|
||||
|
@ -1944,7 +1980,8 @@ def register_namespace_handler(importer_type, namespace_handler):
|
|||
equivalent subpath. For an example namespace handler, see
|
||||
``pkg_resources.file_ns_handler``.
|
||||
"""
|
||||
_namespace_handlers[importer_type] = namespace_handler
|
||||
_namespace_handlers[importer_type] = namespace_handler # noqa: F821
|
||||
|
||||
|
||||
def _handle_ns(packageName, path_item):
|
||||
"""Ensure that named package includes a subpath of path_item (if needed)"""
|
||||
|
@ -1960,9 +1997,9 @@ def _handle_ns(packageName, path_item):
|
|||
module = sys.modules[packageName] = imp.new_module(packageName)
|
||||
module.__path__ = []
|
||||
_set_parent_ns(packageName)
|
||||
elif not hasattr(module,'__path__'):
|
||||
elif not hasattr(module, '__path__'):
|
||||
raise TypeError("Not a package:", packageName)
|
||||
handler = _find_adapter(_namespace_handlers, importer)
|
||||
handler = _find_adapter(_namespace_handlers, importer) # noqa: F821
|
||||
subpath = handler(importer, path_item, packageName, module)
|
||||
if subpath is not None:
|
||||
path = module.__path__
|
||||
|
@ -1973,19 +2010,20 @@ def _handle_ns(packageName, path_item):
|
|||
module.__path__.append(path_item)
|
||||
return subpath
|
||||
|
||||
|
||||
def declare_namespace(packageName):
|
||||
"""Declare that package 'packageName' is a namespace package"""
|
||||
|
||||
imp.acquire_lock()
|
||||
try:
|
||||
if packageName in _namespace_packages:
|
||||
if packageName in _namespace_packages: # noqa: F821
|
||||
return
|
||||
|
||||
path, parent = sys.path, None
|
||||
if '.' in packageName:
|
||||
parent = '.'.join(packageName.split('.')[:-1])
|
||||
declare_namespace(parent)
|
||||
if parent not in _namespace_packages:
|
||||
if parent not in _namespace_packages: # noqa: F821
|
||||
__import__(parent)
|
||||
try:
|
||||
path = sys.modules[parent].__path__
|
||||
|
@ -1994,8 +2032,8 @@ def declare_namespace(packageName):
|
|||
|
||||
# Track what packages are namespaces, so when new path items are added,
|
||||
# they can be updated
|
||||
_namespace_packages.setdefault(parent,[]).append(packageName)
|
||||
_namespace_packages.setdefault(packageName,[])
|
||||
_namespace_packages.setdefault(parent, []).append(packageName) # noqa: F821
|
||||
_namespace_packages.setdefault(packageName, []) # noqa: F821
|
||||
|
||||
for path_item in path:
|
||||
# Ensure all the parent's path items are reflected in the child,
|
||||
|
@ -2005,29 +2043,32 @@ def declare_namespace(packageName):
|
|||
finally:
|
||||
imp.release_lock()
|
||||
|
||||
|
||||
def fixup_namespace_packages(path_item, parent=None):
|
||||
"""Ensure that previously-declared namespace packages include path_item"""
|
||||
imp.acquire_lock()
|
||||
try:
|
||||
for package in _namespace_packages.get(parent,()):
|
||||
for package in _namespace_packages.get(parent, ()): # noqa: F821
|
||||
subpath = _handle_ns(package, path_item)
|
||||
if subpath:
|
||||
fixup_namespace_packages(subpath, package)
|
||||
finally:
|
||||
imp.release_lock()
|
||||
|
||||
|
||||
def file_ns_handler(importer, path_item, packageName, module):
|
||||
"""Compute an ns-package subpath for a filesystem or zipfile importer"""
|
||||
|
||||
subpath = os.path.join(path_item, packageName.split('.')[-1])
|
||||
normalized = _normalize_cached(subpath)
|
||||
for item in module.__path__:
|
||||
if _normalize_cached(item)==normalized:
|
||||
if _normalize_cached(item) == normalized:
|
||||
break
|
||||
else:
|
||||
# Only return the path if it's not already there
|
||||
return subpath
|
||||
|
||||
|
||||
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
|
||||
register_namespace_handler(zipimport.zipimporter, file_ns_handler)
|
||||
|
||||
|
@ -2038,6 +2079,7 @@ if importlib_bootstrap is not None:
|
|||
def null_ns_handler(importer, path_item, packageName, module):
|
||||
return None
|
||||
|
||||
|
||||
register_namespace_handler(object, null_ns_handler)
|
||||
|
||||
|
||||
|
@ -2045,6 +2087,7 @@ def normalize_path(filename):
|
|||
"""Normalize a file/dir name for comparison purposes"""
|
||||
return os.path.normcase(os.path.realpath(filename))
|
||||
|
||||
|
||||
def _normalize_cached(filename, _cache={}):
|
||||
try:
|
||||
return _cache[filename]
|
||||
|
@ -2052,6 +2095,7 @@ def _normalize_cached(filename, _cache={}):
|
|||
_cache[filename] = result = normalize_path(filename)
|
||||
return result
|
||||
|
||||
|
||||
def _set_parent_ns(packageName):
|
||||
parts = packageName.split('.')
|
||||
name = parts.pop()
|
||||
|
@ -2073,6 +2117,7 @@ def yield_lines(strs):
|
|||
for s in yield_lines(ss):
|
||||
yield s
|
||||
|
||||
|
||||
# whitespace and comment
|
||||
LINE_END = re.compile(r"\s*(#.*)?$").match
|
||||
# line continuation
|
||||
|
@ -2093,22 +2138,24 @@ EGG_NAME = re.compile(
|
|||
).match
|
||||
|
||||
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
|
||||
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get
|
||||
replace = {'pre': 'c', 'preview': 'c', '-': 'final-', 'rc': 'c', 'dev': '@'}.get
|
||||
|
||||
|
||||
def _parse_version_parts(s):
|
||||
for part in component_re.split(s):
|
||||
part = replace(part, part)
|
||||
if not part or part=='.':
|
||||
if not part or part == '.':
|
||||
continue
|
||||
if part[:1] in '0123456789':
|
||||
# pad for numeric comparison
|
||||
yield part.zfill(8)
|
||||
else:
|
||||
yield '*'+part
|
||||
yield '*' + part
|
||||
|
||||
# ensure that alpha/beta/candidate are before final
|
||||
yield '*final'
|
||||
|
||||
|
||||
def parse_version(s):
|
||||
"""Convert a version string to a chronologically-sortable key
|
||||
|
||||
|
@ -2148,7 +2195,7 @@ def parse_version(s):
|
|||
while parts and parts[-1] == '*final-':
|
||||
parts.pop()
|
||||
# remove trailing zeros from each series of numeric parts
|
||||
while parts and parts[-1]=='00000000':
|
||||
while parts and parts[-1] == '00000000':
|
||||
parts.pop()
|
||||
parts.append(part)
|
||||
return tuple(parts)
|
||||
|
@ -2237,7 +2284,7 @@ class EntryPoint(object):
|
|||
ep = cls.parse(line, dist)
|
||||
if ep.name in this:
|
||||
raise ValueError("Duplicate entry point", group, ep.name)
|
||||
this[ep.name]=ep
|
||||
this[ep.name] = ep
|
||||
return this
|
||||
|
||||
@classmethod
|
||||
|
@ -2286,7 +2333,7 @@ class Distribution(object):
|
|||
self._provider = metadata or empty_provider
|
||||
|
||||
@classmethod
|
||||
def from_location(cls, location, basename, metadata=None,**kw):
|
||||
def from_location(cls, location, basename, metadata=None, **kw):
|
||||
project_name, version, py_version, platform = [None]*4
|
||||
basename, ext = os.path.splitext(basename)
|
||||
if ext.lower() in _distributionImpl:
|
||||
|
@ -2294,7 +2341,7 @@ class Distribution(object):
|
|||
match = EGG_NAME(basename)
|
||||
if match:
|
||||
project_name, version, py_version, platform = match.group(
|
||||
'name','ver','pyver','plat'
|
||||
'name', 'ver', 'pyver', 'plat'
|
||||
)
|
||||
cls = _distributionImpl[ext.lower()]
|
||||
return cls(
|
||||
|
@ -2364,7 +2411,7 @@ class Distribution(object):
|
|||
except AttributeError:
|
||||
for line in self._get_metadata(self.PKG_INFO):
|
||||
if line.lower().startswith('version:'):
|
||||
self._version = safe_version(line.split(':',1)[1].strip())
|
||||
self._version = safe_version(line.split(':', 1)[1].strip())
|
||||
return self._version
|
||||
else:
|
||||
tmpl = "Missing 'Version:' header and/or %s file"
|
||||
|
@ -2383,11 +2430,11 @@ class Distribution(object):
|
|||
extra, marker = extra.split(':', 1)
|
||||
if invalid_marker(marker):
|
||||
# XXX warn
|
||||
reqs=[]
|
||||
reqs = []
|
||||
elif not evaluate_marker(marker):
|
||||
reqs=[]
|
||||
reqs = []
|
||||
extra = safe_extra(extra) or None
|
||||
dm.setdefault(extra,[]).extend(parse_requirements(reqs))
|
||||
dm.setdefault(extra, []).extend(parse_requirements(reqs))
|
||||
return dm
|
||||
|
||||
def requires(self, extras=()):
|
||||
|
@ -2478,14 +2525,14 @@ class Distribution(object):
|
|||
self._get_metadata('entry_points.txt'), self
|
||||
)
|
||||
if group is not None:
|
||||
return ep_map.get(group,{})
|
||||
return ep_map.get(group, {})
|
||||
return ep_map
|
||||
|
||||
def get_entry_info(self, group, name):
|
||||
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
|
||||
return self.get_entry_map(group).get(name)
|
||||
|
||||
def insert_on(self, path, loc = None):
|
||||
def insert_on(self, path, loc=None):
|
||||
"""Insert self.location in path before its nearest parent directory"""
|
||||
|
||||
loc = loc or self.location
|
||||
|
@ -2494,7 +2541,7 @@ class Distribution(object):
|
|||
|
||||
nloc = _normalize_cached(loc)
|
||||
bdir = os.path.dirname(nloc)
|
||||
npath= [(p and _normalize_cached(p) or p) for p in path]
|
||||
npath = [(p and _normalize_cached(p) or p) for p in path]
|
||||
|
||||
for p, item in enumerate(npath):
|
||||
if item == nloc:
|
||||
|
@ -2534,7 +2581,7 @@ class Distribution(object):
|
|||
loc = normalize_path(self.location)
|
||||
for modname in self._get_metadata('top_level.txt'):
|
||||
if (modname not in sys.modules or modname in nsp
|
||||
or modname in _namespace_packages):
|
||||
or modname in _namespace_packages): # noqa: F821
|
||||
continue
|
||||
if modname in ('pkg_resources', 'setuptools', 'site'):
|
||||
continue
|
||||
|
@ -2555,7 +2602,7 @@ class Distribution(object):
|
|||
return False
|
||||
return True
|
||||
|
||||
def clone(self,**kw):
|
||||
def clone(self, **kw):
|
||||
"""Copy this distribution, substituting in any changed keyword args"""
|
||||
names = 'project_name version py_version platform location precedence'
|
||||
for attr in names.split():
|
||||
|
@ -2618,7 +2665,7 @@ class DistInfoDistribution(Distribution):
|
|||
|
||||
def reqs_for_extra(extra):
|
||||
for req in reqs:
|
||||
if req.marker_fn(override={'extra':extra}):
|
||||
if req.marker_fn(override={'extra': extra}):
|
||||
yield req
|
||||
|
||||
common = frozenset(reqs_for_extra(None))
|
||||
|
@ -2638,7 +2685,7 @@ _distributionImpl = {
|
|||
}
|
||||
|
||||
|
||||
def issue_warning(*args,**kw):
|
||||
def issue_warning(*args, **kw):
|
||||
level = 1
|
||||
g = globals()
|
||||
try:
|
||||
|
@ -2759,7 +2806,7 @@ class Requirement:
|
|||
item = parse_version(item)
|
||||
last = None
|
||||
# -1, 0, 1
|
||||
compare = lambda a, b: (a > b) - (a < b)
|
||||
compare = lambda a, b: (a > b) - (a < b) # noqa: E731
|
||||
for parsed, trans, op, ver in self.index:
|
||||
# Indexing: 0, 1, -1
|
||||
action = trans[compare(item, parsed)]
|
||||
|
@ -2779,7 +2826,8 @@ class Requirement:
|
|||
def __hash__(self):
|
||||
return self.__hash
|
||||
|
||||
def __repr__(self): return "Requirement.parse(%r)" % str(self)
|
||||
def __repr__(self):
|
||||
return "Requirement.parse(%r)" % str(self)
|
||||
|
||||
@staticmethod
|
||||
def parse(s):
|
||||
|
@ -2790,6 +2838,7 @@ class Requirement:
|
|||
raise ValueError("Expected only one requirement", s)
|
||||
raise ValueError("No requirements found", s)
|
||||
|
||||
|
||||
state_machine = {
|
||||
# =><
|
||||
'<': '--T',
|
||||
|
@ -2804,10 +2853,12 @@ state_machine = {
|
|||
def _get_mro(cls):
|
||||
"""Get an mro for a type or classic class"""
|
||||
if not isinstance(cls, type):
|
||||
class cls(cls, object): pass
|
||||
class cls(cls, object):
|
||||
pass
|
||||
return cls.__mro__[1:]
|
||||
return cls.__mro__
|
||||
|
||||
|
||||
def _find_adapter(registry, ob):
|
||||
"""Return an adapter factory for `ob` from `registry`"""
|
||||
for t in _get_mro(getattr(ob, '__class__', type(ob))):
|
||||
|
@ -2857,12 +2908,13 @@ def split_sections(s):
|
|||
# wrap up last segment
|
||||
yield section, content
|
||||
|
||||
def _mkstemp(*args,**kw):
|
||||
|
||||
def _mkstemp(*args, **kw):
|
||||
old_open = os.open
|
||||
try:
|
||||
# temporarily bypass sandboxing
|
||||
os.open = os_open
|
||||
return tempfile.mkstemp(*args,**kw)
|
||||
return tempfile.mkstemp(*args, **kw)
|
||||
finally:
|
||||
# and then put it back
|
||||
os.open = old_open
|
||||
|
@ -2870,10 +2922,14 @@ def _mkstemp(*args,**kw):
|
|||
|
||||
# Set up global resource manager (deliberately not state-saved)
|
||||
_manager = ResourceManager()
|
||||
|
||||
|
||||
def _initialize(g):
|
||||
for name in dir(_manager):
|
||||
if not name.startswith('_'):
|
||||
g[name] = getattr(_manager, name)
|
||||
|
||||
|
||||
_initialize(globals())
|
||||
|
||||
# Prepare the master working set and make the ``require()`` API available
|
||||
|
@ -2890,6 +2946,6 @@ run_main = run_script
|
|||
# all distributions added to the working set in the future (e.g. by
|
||||
# calling ``require()``) will get activated as well.
|
||||
add_activation_listener(lambda dist: dist.activate())
|
||||
working_set.entries=[]
|
||||
working_set.entries = []
|
||||
# match order
|
||||
list(map(working_set.add_entry, sys.path))
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
from toolchain import PythonRecipe
|
||||
|
||||
|
||||
class PlyerRecipe(PythonRecipe):
|
||||
version = "master"
|
||||
url = "https://github.com/kivy/plyer/archive/{version}.zip"
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
'''Recipe for pycrypto on ios
|
||||
'''
|
||||
from toolchain import CythonRecipe, shprint
|
||||
from os.path import join, exists
|
||||
from os.path import join
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
@ -11,8 +11,7 @@ class PycryptoRecipe(CythonRecipe):
|
|||
url = "https://ftp.dlitz.net/pub/dlitz/crypto/pycrypto/pycrypto-{version}.tar.gz"
|
||||
depends = ["python", "openssl"]
|
||||
include_per_arch = True
|
||||
library="libpycrypto.a"
|
||||
|
||||
library = "libpycrypto.a"
|
||||
|
||||
def build_arch(self, arch):
|
||||
build_env = arch.get_env()
|
||||
|
@ -26,9 +25,7 @@ class PycryptoRecipe(CythonRecipe):
|
|||
"--prefix=/",
|
||||
"--host={}".format(arch),
|
||||
"ac_cv_func_malloc_0_nonnull=yes",
|
||||
"ac_cv_func_realloc_0_nonnull=yes",
|
||||
)
|
||||
hostpython = sh.Command(self.ctx.hostpython)
|
||||
"ac_cv_func_realloc_0_nonnull=yes")
|
||||
super(PycryptoRecipe, self).build_arch(arch)
|
||||
|
||||
def install(self):
|
||||
|
@ -41,5 +38,5 @@ class PycryptoRecipe(CythonRecipe):
|
|||
build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
|
||||
shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
|
||||
|
||||
recipe = PycryptoRecipe()
|
||||
|
||||
recipe = PycryptoRecipe()
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
from toolchain import CythonRecipe
|
||||
|
||||
|
||||
class PyobjusRecipe(CythonRecipe):
|
||||
version = "master"
|
||||
url = "https://github.com/kivy/pyobjus/archive/{version}.zip"
|
||||
|
|
|
@ -6,6 +6,7 @@ from os.path import join
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PythonAliasRecipe(Recipe):
|
||||
is_alias = True
|
||||
|
||||
|
@ -26,4 +27,5 @@ class PythonAliasRecipe(Recipe):
|
|||
self.depends = [python]
|
||||
self.recipe_dir = join(ctx.root_dir, "recipes", python)
|
||||
|
||||
|
||||
recipe = PythonAliasRecipe()
|
||||
|
|
|
@ -4,8 +4,10 @@ import sh
|
|||
import os
|
||||
import logging
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Python2Recipe(Recipe):
|
||||
version = "2.7.1"
|
||||
url = "https://www.python.org/ftp/python/{version}/Python-{version}.tar.bz2"
|
||||
|
@ -88,6 +90,7 @@ class Python2Recipe(Recipe):
|
|||
# architecture can lead to different pyconfig.h, we would need one patch
|
||||
# per arch. Instead, express here the line we don't want / we want.
|
||||
pyconfig = join(self.build_dir, "pyconfig.h")
|
||||
|
||||
def _remove_line(lines, pattern):
|
||||
for line in lines[:]:
|
||||
if pattern in line:
|
||||
|
@ -130,7 +133,6 @@ class Python2Recipe(Recipe):
|
|||
os.chdir(join(self.ctx.dist_dir, "root", "python2", "lib", "python2.7"))
|
||||
sh.find(".", "-iname", "*.pyc", "-exec", "rm", "{}", ";")
|
||||
sh.find(".", "-iname", "*.py", "-exec", "rm", "{}", ";")
|
||||
#sh.find(".", "-iname", "test*", "-exec", "rm", "-rf", "{}", ";")
|
||||
sh.rm("-rf", "wsgiref", "bsddb", "curses", "idlelib", "hotshot")
|
||||
sh.rm("-rf", sh.glob("lib*"))
|
||||
|
||||
|
|
|
@ -3,8 +3,10 @@ Stub functions for _scproxy on iOS
|
|||
No proxy is supported yet.
|
||||
'''
|
||||
|
||||
|
||||
def _get_proxy_settings():
|
||||
return {'exclude_simple': 1}
|
||||
|
||||
|
||||
def _get_proxies():
|
||||
return {}
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
__version__ = 'kivy-ios'
|
||||
from ._sqlite3 import *
|
||||
from ._sqlite3 import * # noqa: F401, F403
|
||||
|
|
|
@ -3,6 +3,7 @@ import os
|
|||
import sh
|
||||
from toolchain import PythonRecipe, shprint
|
||||
|
||||
|
||||
class PyYamlRecipe(PythonRecipe):
|
||||
version = "3.11"
|
||||
url = "https://pypi.python.org/packages/source/P/PyYAML/PyYAML-{version}.tar.gz"
|
||||
|
@ -18,4 +19,5 @@ class PyYamlRecipe(PythonRecipe):
|
|||
build_env['PYTHONPATH'] = os.path.join(dest_dir, 'lib', 'python2.7', 'site-packages')
|
||||
shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
|
||||
|
||||
|
||||
recipe = PyYamlRecipe()
|
||||
|
|
|
@ -24,4 +24,5 @@ class LibSDL2MixerRecipe(Recipe):
|
|||
"-target", "libSDL_mixer-iOS",
|
||||
"-configuration", "Release")
|
||||
|
||||
|
||||
recipe = LibSDL2MixerRecipe()
|
||||
|
|
|
@ -30,4 +30,5 @@ class LibSDL2TTFRecipe(Recipe):
|
|||
join(self.get_build_dir(arch.arch), "SDL_ttf.h"),
|
||||
join(self.ctx.include_dir, "common", "SDL2"))
|
||||
|
||||
|
||||
recipe = LibSDL2TTFRecipe()
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
# pure-python package, this can be removed when we'll support any python package
|
||||
from toolchain import PythonRecipe, shprint
|
||||
from os.path import join
|
||||
import sh, os
|
||||
import sh
|
||||
import os
|
||||
|
||||
|
||||
class WerkzeugRecipe(PythonRecipe):
|
||||
version = "master"
|
||||
url = "https://github.com/mitsuhiko/werkzeug/archive/{version}.zip"
|
||||
depends = ["python","openssl"]
|
||||
depends = ["python", "openssl"]
|
||||
|
||||
def install(self):
|
||||
arch = list(self.filtered_archs)[0]
|
||||
|
@ -18,5 +20,5 @@ class WerkzeugRecipe(PythonRecipe):
|
|||
build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
|
||||
shprint(hostpython, "setup.py", "install", "--prefix", dest_dir, _env=build_env)
|
||||
|
||||
recipe = WerkzeugRecipe()
|
||||
|
||||
recipe = WerkzeugRecipe()
|
||||
|
|
|
@ -5,12 +5,13 @@ import sh
|
|||
import fnmatch
|
||||
from distutils.dir_util import copy_tree
|
||||
|
||||
|
||||
class ZbarLightRecipe(Recipe):
|
||||
version = '1.2'
|
||||
url = 'https://github.com/Polyconseil/zbarlight/archive/{version}.tar.gz'
|
||||
library = "zbarlight.a"
|
||||
depends = ['hostpython2','python2', 'libzbar']
|
||||
pbx_libraries = ["libz","libbz2",'libc++','libsqlite3','CoreMotion']
|
||||
depends = ['hostpython2', 'python2', 'libzbar']
|
||||
pbx_libraries = ["libz", "libbz2", 'libc++', 'libsqlite3', 'CoreMotion']
|
||||
include_per_arch = True
|
||||
|
||||
def get_zbar_env(self, arch):
|
||||
|
@ -25,7 +26,7 @@ class ZbarLightRecipe(Recipe):
|
|||
build_env["LIBRARY_PATH"] = join(arch.sysroot, "usr", "lib")
|
||||
build_env['PYTHONPATH'] = join(dest_dir, 'lib', 'python2.7', 'site-packages')
|
||||
build_env["CFLAGS"] = " ".join([
|
||||
" -I{}".format(join(self.ctx.dist_dir, "include", arch.arch, "libzbar",'zbar')) +
|
||||
" -I{}".format(join(self.ctx.dist_dir, "include", arch.arch, "libzbar", 'zbar')) +
|
||||
" -arch {}".format(arch.arch)
|
||||
])
|
||||
build_env['LDFLAGS'] += " -lios -lpython -lzbar"
|
||||
|
@ -34,25 +35,26 @@ class ZbarLightRecipe(Recipe):
|
|||
def build_arch(self, arch):
|
||||
build_env = self.get_zbar_env(arch)
|
||||
hostpython = sh.Command(self.ctx.hostpython)
|
||||
shprint(hostpython, "setup.py", "build",
|
||||
shprint(hostpython, "setup.py", "build", # noqa: F821
|
||||
_env=build_env)
|
||||
self.apply_patch("zbarlight_1_2.patch")#Issue getting the version, hard coding for now
|
||||
self.apply_patch("zbarlight_1_2.patch") # Issue getting the version, hard coding for now
|
||||
self.biglink()
|
||||
|
||||
def install(self):
|
||||
arch = list(self.filtered_archs)[0]
|
||||
build_dir = join(self.get_build_dir(arch.arch),'build','lib.macosx-10.13-x86_64-2.7','zbarlight')
|
||||
dist_dir = join(self.ctx.dist_dir,'root','python2','lib','python2.7','site-packages','zbarlight')
|
||||
#Patch before Copying
|
||||
#self.apply_patch("zbarlight_1_2.patch")#Issue getting the version, hard coding for now
|
||||
build_dir = join(self.get_build_dir(arch.arch), 'build',
|
||||
'lib.macosx-10.13-x86_64-2.7', 'zbarlight')
|
||||
dist_dir = join(self.ctx.dist_dir, 'root', 'python2', 'lib',
|
||||
'python2.7', 'site-packages', 'zbarlight')
|
||||
# Patch before Copying
|
||||
# self.apply_patch("zbarlight_1_2.patch")#Issue getting the version, hard coding for now
|
||||
copy_tree(build_dir, dist_dir)
|
||||
os.remove(join(dist_dir,'_zbarlight.c'))
|
||||
|
||||
os.remove(join(dist_dir, '_zbarlight.c'))
|
||||
|
||||
def _patch__init__(self):
|
||||
init = join(self.ctx.dist_dir,'root','python2','lib','python2.7',
|
||||
'site-packages','zbarlight', "__init__.py")
|
||||
shprint(
|
||||
init = join(self.ctx.dist_dir, 'root', 'python2', 'lib', 'python2.7',
|
||||
'site-packages', 'zbarlight', "__init__.py")
|
||||
shprint( # noqa: F821
|
||||
sh.sed, "-i.bak",
|
||||
"s/__version__ = pkg_resources.get_distribution('zbarlight').version'"
|
||||
"/__version__ = '{version}'/g",
|
||||
|
@ -64,6 +66,7 @@ class ZbarLightRecipe(Recipe):
|
|||
if fnmatch.filter(filenames, "*.so.libs"):
|
||||
dirs.append(root)
|
||||
cmd = sh.Command(join(self.ctx.root_dir, "tools", "biglink"))
|
||||
shprint(cmd, join(self.build_dir, "zbarlight.a"), *dirs)
|
||||
shprint(cmd, join(self.build_dir, "zbarlight.a"), *dirs) # noqa: F821
|
||||
|
||||
|
||||
recipe = ZbarLightRecipe()
|
||||
|
|
|
@ -9,7 +9,7 @@ def test_kivy():
|
|||
import kivy
|
||||
import kivy.event
|
||||
import kivy.core.window
|
||||
import kivy.uix.widget
|
||||
import kivy.uix.widget # noqa: F401
|
||||
|
||||
|
||||
def test_audiostream():
|
||||
|
@ -39,7 +39,7 @@ def test_numpy():
|
|||
|
||||
|
||||
def test_curly():
|
||||
import curly
|
||||
import curly # noqa: F401
|
||||
|
||||
|
||||
def run_test(f, name):
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
print("Python 3 running!")
|
||||
import sys
|
||||
print(f"sys.path: {sys.path}")
|
||||
import os
|
||||
import traceback
|
||||
|
||||
modules_to_tests = [
|
||||
|
@ -21,17 +20,18 @@ for name in modules_to_tests:
|
|||
|
||||
# test pyobjus
|
||||
print("- import pyobjus start")
|
||||
import pyobjus
|
||||
import pyobjus # noqa: F401
|
||||
print("- import done")
|
||||
from pyobjus import autoclass
|
||||
NSNotificationCenter = autoclass("NSNotificationCenter")
|
||||
|
||||
# test ios
|
||||
import ios
|
||||
import ios # noqa: F401
|
||||
|
||||
from kivy.app import App
|
||||
from kivy.lang import Builder
|
||||
|
||||
|
||||
class TestApp(App):
|
||||
def build(self):
|
||||
return Builder.load_string("""
|
||||
|
@ -51,4 +51,5 @@ RelativeLayout:
|
|||
|
||||
""")
|
||||
|
||||
|
||||
TestApp().run()
|
||||
|
|
|
@ -7,6 +7,7 @@ import subprocess
|
|||
# resolve cython executable
|
||||
cython = None
|
||||
|
||||
|
||||
def resolve_cython():
|
||||
global cython
|
||||
for executable in ('cython', 'cython-2.7'):
|
||||
|
@ -17,6 +18,7 @@ def resolve_cython():
|
|||
cython = os.path.join(path, executable)
|
||||
return
|
||||
|
||||
|
||||
def do(fn):
|
||||
print('cythonize:', fn)
|
||||
assert(fn.endswith('.pyx'))
|
||||
|
@ -48,6 +50,7 @@ def do(fn):
|
|||
with open(fn_c, 'w') as fd:
|
||||
fd.write(data)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
print('-- cythonize', sys.argv)
|
||||
resolve_cython()
|
||||
|
|
tox.ini
```
@@ -13,6 +13,13 @@ deps = flake8
commands = flake8 recipes/ tools/ tests/ .ci/ toolchain.py

[flake8]
exclude = tools/external/,
    toolchain.py, # Temporary removal: TODO: ZenCODE
    recipes/kivy, # Temporary removal: TODO: ZenCODE
    recipes/python3, # Temporary removal: TODO: ZenCODE
    recipes/hostpython3, # Temporary removal: TODO: ZenCODE
    recipes/host_setuptools3, # Temporary removal: TODO: ZenCODE

ignore =
    E123, # Closing bracket does not match indentation of opening bracket's line
    E124, # Closing bracket does not match visual indentation
```
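As a quick, hypothetical sanity check (not part of this commit), the new `[flake8]` block can be parsed with the standard-library `configparser` to list what is being excluded; this only reads the ini value and makes no claim about how flake8 itself interprets the entries.

```python
import configparser

# Self-contained copy of the new block; inline_comment_prefixes strips the
# trailing "# Temporary removal ..." comments from the multi-line value.
FLAKE8_BLOCK = """
[flake8]
exclude = tools/external/,
    toolchain.py, # Temporary removal: TODO: ZenCODE
    recipes/kivy, # Temporary removal: TODO: ZenCODE
    recipes/python3, # Temporary removal: TODO: ZenCODE
    recipes/hostpython3, # Temporary removal: TODO: ZenCODE
    recipes/host_setuptools3, # Temporary removal: TODO: ZenCODE
"""

parser = configparser.ConfigParser(inline_comment_prefixes=('#',))
parser.read_string(FLAKE8_BLOCK)
excluded = [e.strip() for e in parser.get('flake8', 'exclude').split(',') if e.strip()]
print(excluded)
# ['tools/external/', 'toolchain.py', 'recipes/kivy', 'recipes/python3',
#  'recipes/hostpython3', 'recipes/host_setuptools3']
```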